Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/heap.cc

Issue 844006: Merge changes up to V8 version 2.1.3 into the partial snapshots (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/partial_snapshots/
Patch Set: Created 10 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 28 matching lines...) Expand all
39 #include "natives.h" 39 #include "natives.h"
40 #include "scanner.h" 40 #include "scanner.h"
41 #include "scopeinfo.h" 41 #include "scopeinfo.h"
42 #include "snapshot.h" 42 #include "snapshot.h"
43 #include "v8threads.h" 43 #include "v8threads.h"
44 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP 44 #if V8_TARGET_ARCH_ARM && V8_NATIVE_REGEXP
45 #include "regexp-macro-assembler.h" 45 #include "regexp-macro-assembler.h"
46 #include "arm/regexp-macro-assembler-arm.h" 46 #include "arm/regexp-macro-assembler-arm.h"
47 #endif 47 #endif
48 48
49
49 namespace v8 { 50 namespace v8 {
50 namespace internal { 51 namespace internal {
51 52
52 53
53 String* Heap::hidden_symbol_; 54 String* Heap::hidden_symbol_;
54 Object* Heap::roots_[Heap::kRootListLength]; 55 Object* Heap::roots_[Heap::kRootListLength];
55 56
56 57
57 NewSpace Heap::new_space_; 58 NewSpace Heap::new_space_;
58 OldSpace* Heap::old_pointer_space_ = NULL; 59 OldSpace* Heap::old_pointer_space_ = NULL;
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
108 int Heap::survived_since_last_expansion_ = 0; 109 int Heap::survived_since_last_expansion_ = 0;
109 int Heap::external_allocation_limit_ = 0; 110 int Heap::external_allocation_limit_ = 0;
110 111
111 Heap::HeapState Heap::gc_state_ = NOT_IN_GC; 112 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
112 113
113 int Heap::mc_count_ = 0; 114 int Heap::mc_count_ = 0;
114 int Heap::gc_count_ = 0; 115 int Heap::gc_count_ = 0;
115 116
116 int Heap::always_allocate_scope_depth_ = 0; 117 int Heap::always_allocate_scope_depth_ = 0;
117 int Heap::linear_allocation_scope_depth_ = 0; 118 int Heap::linear_allocation_scope_depth_ = 0;
118 bool Heap::context_disposed_pending_ = false; 119 int Heap::contexts_disposed_ = 0;
119 120
120 #ifdef DEBUG 121 #ifdef DEBUG
121 bool Heap::allocation_allowed_ = true; 122 bool Heap::allocation_allowed_ = true;
122 123
123 int Heap::allocation_timeout_ = 0; 124 int Heap::allocation_timeout_ = 0;
124 bool Heap::disallow_allocation_failure_ = false; 125 bool Heap::disallow_allocation_failure_ = false;
125 #endif // DEBUG 126 #endif // DEBUG
126 127
127 128
128 int Heap::Capacity() { 129 int Heap::Capacity() {
(...skipping 235 matching lines...) Expand 10 before | Expand all | Expand 10 after
364 void Heap::CollectAllGarbage(bool force_compaction) { 365 void Heap::CollectAllGarbage(bool force_compaction) {
365 // Since we are ignoring the return value, the exact choice of space does 366 // Since we are ignoring the return value, the exact choice of space does
366 // not matter, so long as we do not specify NEW_SPACE, which would not 367 // not matter, so long as we do not specify NEW_SPACE, which would not
367 // cause a full GC. 368 // cause a full GC.
368 MarkCompactCollector::SetForceCompaction(force_compaction); 369 MarkCompactCollector::SetForceCompaction(force_compaction);
369 CollectGarbage(0, OLD_POINTER_SPACE); 370 CollectGarbage(0, OLD_POINTER_SPACE);
370 MarkCompactCollector::SetForceCompaction(false); 371 MarkCompactCollector::SetForceCompaction(false);
371 } 372 }
372 373
373 374
374 void Heap::CollectAllGarbageIfContextDisposed() {
375 // If the garbage collector interface is exposed through the global
376 // gc() function, we avoid being clever about forcing GCs when
377 // contexts are disposed and leave it to the embedder to make
378 // informed decisions about when to force a collection.
379 if (!FLAG_expose_gc && context_disposed_pending_) {
380 HistogramTimerScope scope(&Counters::gc_context);
381 CollectAllGarbage(false);
382 }
383 context_disposed_pending_ = false;
384 }
385
386
387 void Heap::NotifyContextDisposed() {
388 context_disposed_pending_ = true;
389 }
390
391
392 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { 375 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
393 // The VM is in the GC state until exiting this function. 376 // The VM is in the GC state until exiting this function.
394 VMState state(GC); 377 VMState state(GC);
395 378
396 #ifdef DEBUG 379 #ifdef DEBUG
397 // Reset the allocation timeout to the GC interval, but make sure to 380 // Reset the allocation timeout to the GC interval, but make sure to
398 // allow at least a few allocations after a collection. The reason 381 // allow at least a few allocations after a collection. The reason
399 // for this is that we have a lot of allocation sequences and we 382 // for this is that we have a lot of allocation sequences and we
400 // assume that a garbage collection will allow the subsequent 383 // assume that a garbage collection will allow the subsequent
401 // allocation attempts to go through. 384 // allocation attempts to go through.
(...skipping 149 matching lines...) Expand 10 before | Expand all | Expand 10 after
551 V8::FatalProcessOutOfMemory("Committing semi space failed."); 534 V8::FatalProcessOutOfMemory("Committing semi space failed.");
552 } 535 }
553 536
554 537
555 void Heap::PerformGarbageCollection(AllocationSpace space, 538 void Heap::PerformGarbageCollection(AllocationSpace space,
556 GarbageCollector collector, 539 GarbageCollector collector,
557 GCTracer* tracer) { 540 GCTracer* tracer) {
558 VerifySymbolTable(); 541 VerifySymbolTable();
559 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { 542 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
560 ASSERT(!allocation_allowed_); 543 ASSERT(!allocation_allowed_);
544 GCTracer::ExternalScope scope(tracer);
561 global_gc_prologue_callback_(); 545 global_gc_prologue_callback_();
562 } 546 }
563 EnsureFromSpaceIsCommitted(); 547 EnsureFromSpaceIsCommitted();
548
549 // Perform mark-sweep with optional compaction.
564 if (collector == MARK_COMPACTOR) { 550 if (collector == MARK_COMPACTOR) {
565 MarkCompact(tracer); 551 MarkCompact(tracer);
552 }
566 553
554 // Always perform a scavenge to make room in new space.
555 Scavenge();
556
557 // Update the old space promotion limits after the scavenge due to
558 // promotions during scavenge.
559 if (collector == MARK_COMPACTOR) {
567 int old_gen_size = PromotedSpaceSize(); 560 int old_gen_size = PromotedSpaceSize();
568 old_gen_promotion_limit_ = 561 old_gen_promotion_limit_ =
569 old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3); 562 old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
570 old_gen_allocation_limit_ = 563 old_gen_allocation_limit_ =
571 old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2); 564 old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
572 old_gen_exhausted_ = false; 565 old_gen_exhausted_ = false;
573 } 566 }
574 Scavenge();
575 567
576 Counters::objs_since_last_young.Set(0); 568 Counters::objs_since_last_young.Set(0);
577 569
578 if (collector == MARK_COMPACTOR) { 570 if (collector == MARK_COMPACTOR) {
579 DisableAssertNoAllocation allow_allocation; 571 DisableAssertNoAllocation allow_allocation;
572 GCTracer::ExternalScope scope(tracer);
580 GlobalHandles::PostGarbageCollectionProcessing(); 573 GlobalHandles::PostGarbageCollectionProcessing();
581 } 574 }
582 575
583 // Update relocatables. 576 // Update relocatables.
584 Relocatable::PostGarbageCollectionProcessing(); 577 Relocatable::PostGarbageCollectionProcessing();
585 578
586 if (collector == MARK_COMPACTOR) { 579 if (collector == MARK_COMPACTOR) {
587 // Register the amount of external allocated memory. 580 // Register the amount of external allocated memory.
588 amount_of_external_allocated_memory_at_last_global_gc_ = 581 amount_of_external_allocated_memory_at_last_global_gc_ =
589 amount_of_external_allocated_memory_; 582 amount_of_external_allocated_memory_;
590 } 583 }
591 584
592 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) { 585 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
593 ASSERT(!allocation_allowed_); 586 ASSERT(!allocation_allowed_);
587 GCTracer::ExternalScope scope(tracer);
594 global_gc_epilogue_callback_(); 588 global_gc_epilogue_callback_();
595 } 589 }
596 VerifySymbolTable(); 590 VerifySymbolTable();
597 } 591 }
598 592
599 593
600 void Heap::MarkCompact(GCTracer* tracer) { 594 void Heap::MarkCompact(GCTracer* tracer) {
601 gc_state_ = MARK_COMPACT; 595 gc_state_ = MARK_COMPACT;
602 mc_count_++; 596 mc_count_++;
603 tracer->set_full_gc_count(mc_count_); 597 tracer->set_full_gc_count(mc_count_);
604 LOG(ResourceEvent("markcompact", "begin")); 598 LOG(ResourceEvent("markcompact", "begin"));
605 599
606 MarkCompactCollector::Prepare(tracer); 600 MarkCompactCollector::Prepare(tracer);
607 601
608 bool is_compacting = MarkCompactCollector::IsCompacting(); 602 bool is_compacting = MarkCompactCollector::IsCompacting();
609 603
610 MarkCompactPrologue(is_compacting); 604 MarkCompactPrologue(is_compacting);
611 605
612 MarkCompactCollector::CollectGarbage(); 606 MarkCompactCollector::CollectGarbage();
613 607
614 MarkCompactEpilogue(is_compacting); 608 MarkCompactEpilogue(is_compacting);
615 609
616 LOG(ResourceEvent("markcompact", "end")); 610 LOG(ResourceEvent("markcompact", "end"));
617 611
618 gc_state_ = NOT_IN_GC; 612 gc_state_ = NOT_IN_GC;
619 613
620 Shrink(); 614 Shrink();
621 615
622 Counters::objs_since_last_full.Set(0); 616 Counters::objs_since_last_full.Set(0);
623 context_disposed_pending_ = false; 617
618 contexts_disposed_ = 0;
624 } 619 }
625 620
626 621
627 void Heap::MarkCompactPrologue(bool is_compacting) { 622 void Heap::MarkCompactPrologue(bool is_compacting) {
628 // At any old GC clear the keyed lookup cache to enable collection of unused 623 // At any old GC clear the keyed lookup cache to enable collection of unused
629 // maps. 624 // maps.
630 KeyedLookupCache::Clear(); 625 KeyedLookupCache::Clear();
631 ContextSlotCache::Clear(); 626 ContextSlotCache::Clear();
632 DescriptorLookupCache::Clear(); 627 DescriptorLookupCache::Clear();
633 628
(...skipping 580 matching lines...) Expand 10 before | Expand all | Expand 10 after
1214 // If the map object is aligned fill the padding area with Smi 0 objects. 1209 // If the map object is aligned fill the padding area with Smi 0 objects.
1215 if (Map::kPadStart < Map::kSize) { 1210 if (Map::kPadStart < Map::kSize) {
1216 memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag, 1211 memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
1217 0, 1212 0,
1218 Map::kSize - Map::kPadStart); 1213 Map::kSize - Map::kPadStart);
1219 } 1214 }
1220 return map; 1215 return map;
1221 } 1216 }
1222 1217
1223 1218
1219 Object* Heap::AllocateCodeCache() {
1220 Object* result = AllocateStruct(CODE_CACHE_TYPE);
1221 if (result->IsFailure()) return result;
1222 CodeCache* code_cache = CodeCache::cast(result);
1223 code_cache->set_default_cache(empty_fixed_array());
1224 code_cache->set_normal_type_cache(undefined_value());
1225 return code_cache;
1226 }
1227
1228
1224 const Heap::StringTypeTable Heap::string_type_table[] = { 1229 const Heap::StringTypeTable Heap::string_type_table[] = {
1225 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ 1230 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
1226 {type, size, k##camel_name##MapRootIndex}, 1231 {type, size, k##camel_name##MapRootIndex},
1227 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) 1232 STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
1228 #undef STRING_TYPE_ELEMENT 1233 #undef STRING_TYPE_ELEMENT
1229 }; 1234 };
1230 1235
1231 1236
1232 const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = { 1237 const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
1233 #define CONSTANT_SYMBOL_ELEMENT(name, contents) \ 1238 #define CONSTANT_SYMBOL_ELEMENT(name, contents) \
(...skipping 396 matching lines...) Expand 10 before | Expand all | Expand 10 after
1630 // is set to avoid expanding the dictionary during bootstrapping. 1635 // is set to avoid expanding the dictionary during bootstrapping.
1631 obj = NumberDictionary::Allocate(64); 1636 obj = NumberDictionary::Allocate(64);
1632 if (obj->IsFailure()) return false; 1637 if (obj->IsFailure()) return false;
1633 set_non_monomorphic_cache(NumberDictionary::cast(obj)); 1638 set_non_monomorphic_cache(NumberDictionary::cast(obj));
1634 1639
1635 CreateFixedStubs(); 1640 CreateFixedStubs();
1636 1641
1637 if (InitializeNumberStringCache()->IsFailure()) return false; 1642 if (InitializeNumberStringCache()->IsFailure()) return false;
1638 1643
1639 // Allocate cache for single character strings. 1644 // Allocate cache for single character strings.
1640 obj = AllocateFixedArray(String::kMaxAsciiCharCode+1); 1645 obj = AllocateFixedArray(String::kMaxAsciiCharCode+1, TENURED);
1641 if (obj->IsFailure()) return false; 1646 if (obj->IsFailure()) return false;
1642 set_single_character_string_cache(FixedArray::cast(obj)); 1647 set_single_character_string_cache(FixedArray::cast(obj));
1643 1648
1644 // Allocate cache for external strings pointing to native source code. 1649 // Allocate cache for external strings pointing to native source code.
1645 obj = AllocateFixedArray(Natives::GetBuiltinsCount()); 1650 obj = AllocateFixedArray(Natives::GetBuiltinsCount());
1646 if (obj->IsFailure()) return false; 1651 if (obj->IsFailure()) return false;
1647 set_natives_source_cache(FixedArray::cast(obj)); 1652 set_natives_source_cache(FixedArray::cast(obj));
1648 1653
1649 // Handling of script id generation is in Factory::NewScript. 1654 // Handling of script id generation is in Factory::NewScript.
1650 set_last_script_id(undefined_value()); 1655 set_last_script_id(undefined_value());
(...skipping 13 matching lines...) Expand all
1664 return true; 1669 return true;
1665 } 1670 }
1666 1671
1667 1672
1668 Object* Heap::InitializeNumberStringCache() { 1673 Object* Heap::InitializeNumberStringCache() {
1669 // Compute the size of the number string cache based on the max heap size. 1674 // Compute the size of the number string cache based on the max heap size.
1670 // max_semispace_size_ == 512 KB => number_string_cache_size = 32. 1675 // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
1671 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB. 1676 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB.
1672 int number_string_cache_size = max_semispace_size_ / 512; 1677 int number_string_cache_size = max_semispace_size_ / 512;
1673 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size)); 1678 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
1674 Object* obj = AllocateFixedArray(number_string_cache_size * 2); 1679 Object* obj = AllocateFixedArray(number_string_cache_size * 2, TENURED);
1675 if (!obj->IsFailure()) set_number_string_cache(FixedArray::cast(obj)); 1680 if (!obj->IsFailure()) set_number_string_cache(FixedArray::cast(obj));
1676 return obj; 1681 return obj;
1677 } 1682 }
1678 1683
1679 1684
1680 void Heap::FlushNumberStringCache() { 1685 void Heap::FlushNumberStringCache() {
1681 // Flush the number to string cache. 1686 // Flush the number to string cache.
1682 int len = number_string_cache()->length(); 1687 int len = number_string_cache()->length();
1683 for (int i = 0; i < len; i++) { 1688 for (int i = 0; i < len; i++) {
1684 number_string_cache()->set_undefined(i); 1689 number_string_cache()->set_undefined(i);
(...skipping 302 matching lines...) Expand 10 before | Expand all | Expand 10 after
1987 cons_string->set_length(length); 1992 cons_string->set_length(length);
1988 cons_string->set_hash_field(String::kEmptyHashField); 1993 cons_string->set_hash_field(String::kEmptyHashField);
1989 cons_string->set_first(first, mode); 1994 cons_string->set_first(first, mode);
1990 cons_string->set_second(second, mode); 1995 cons_string->set_second(second, mode);
1991 return result; 1996 return result;
1992 } 1997 }
1993 1998
1994 1999
1995 Object* Heap::AllocateSubString(String* buffer, 2000 Object* Heap::AllocateSubString(String* buffer,
1996 int start, 2001 int start,
1997 int end) { 2002 int end,
2003 PretenureFlag pretenure) {
1998 int length = end - start; 2004 int length = end - start;
1999 2005
2000 if (length == 1) { 2006 if (length == 1) {
2001 return Heap::LookupSingleCharacterStringFromCode( 2007 return Heap::LookupSingleCharacterStringFromCode(
2002 buffer->Get(start)); 2008 buffer->Get(start));
2003 } else if (length == 2) { 2009 } else if (length == 2) {
2004 // Optimization for 2-byte strings often used as keys in a decompression 2010 // Optimization for 2-byte strings often used as keys in a decompression
2005 // dictionary. Check whether we already have the string in the symbol 2011 // dictionary. Check whether we already have the string in the symbol
2006 // table to prevent creation of many unnecessary strings. 2012 // table to prevent creation of many unnecessary strings.
2007 unsigned c1 = buffer->Get(start); 2013 unsigned c1 = buffer->Get(start);
2008 unsigned c2 = buffer->Get(start + 1); 2014 unsigned c2 = buffer->Get(start + 1);
2009 return MakeOrFindTwoCharacterString(c1, c2); 2015 return MakeOrFindTwoCharacterString(c1, c2);
2010 } 2016 }
2011 2017
2012 // Make an attempt to flatten the buffer to reduce access time. 2018 // Make an attempt to flatten the buffer to reduce access time.
2013 if (!buffer->IsFlat()) { 2019 buffer->TryFlatten();
2014 buffer->TryFlatten();
2015 }
2016 2020
2017 Object* result = buffer->IsAsciiRepresentation() 2021 Object* result = buffer->IsAsciiRepresentation()
2018 ? AllocateRawAsciiString(length) 2022 ? AllocateRawAsciiString(length, pretenure )
2019 : AllocateRawTwoByteString(length); 2023 : AllocateRawTwoByteString(length, pretenure);
2020 if (result->IsFailure()) return result; 2024 if (result->IsFailure()) return result;
2021 String* string_result = String::cast(result); 2025 String* string_result = String::cast(result);
2022
2023 // Copy the characters into the new object. 2026 // Copy the characters into the new object.
2024 if (buffer->IsAsciiRepresentation()) { 2027 if (buffer->IsAsciiRepresentation()) {
2025 ASSERT(string_result->IsAsciiRepresentation()); 2028 ASSERT(string_result->IsAsciiRepresentation());
2026 char* dest = SeqAsciiString::cast(string_result)->GetChars(); 2029 char* dest = SeqAsciiString::cast(string_result)->GetChars();
2027 String::WriteToFlat(buffer, dest, start, end); 2030 String::WriteToFlat(buffer, dest, start, end);
2028 } else { 2031 } else {
2029 ASSERT(string_result->IsTwoByteRepresentation()); 2032 ASSERT(string_result->IsTwoByteRepresentation());
2030 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); 2033 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
2031 String::WriteToFlat(buffer, dest, start, end); 2034 String::WriteToFlat(buffer, dest, start, end);
2032 } 2035 }
(...skipping 929 matching lines...) Expand 10 before | Expand all | Expand 10 after
2962 array->set_length(length); 2965 array->set_length(length);
2963 Object* value = undefined_value(); 2966 Object* value = undefined_value();
2964 for (int index = 0; index < length; index++) { 2967 for (int index = 0; index < length; index++) {
2965 ASSERT(!Heap::InNewSpace(value)); // value = undefined 2968 ASSERT(!Heap::InNewSpace(value)); // value = undefined
2966 array->set(index, value, SKIP_WRITE_BARRIER); 2969 array->set(index, value, SKIP_WRITE_BARRIER);
2967 } 2970 }
2968 return array; 2971 return array;
2969 } 2972 }
2970 2973
2971 2974
2975 Object* Heap::AllocateUninitializedFixedArray(int length) {
2976 if (length == 0) return empty_fixed_array();
2977
2978 Object* obj = AllocateRawFixedArray(length);
2979 if (obj->IsFailure()) return obj;
2980
2981 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map());
2982 FixedArray::cast(obj)->set_length(length);
2983 return obj;
2984 }
2985
2986
2972 Object* Heap::AllocateFixedArrayWithHoles(int length) { 2987 Object* Heap::AllocateFixedArrayWithHoles(int length) {
2973 if (length == 0) return empty_fixed_array(); 2988 if (length == 0) return empty_fixed_array();
2974 Object* result = AllocateRawFixedArray(length); 2989 Object* result = AllocateRawFixedArray(length);
2975 if (!result->IsFailure()) { 2990 if (!result->IsFailure()) {
2976 // Initialize header. 2991 // Initialize header.
2977 reinterpret_cast<Array*>(result)->set_map(fixed_array_map()); 2992 reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2978 FixedArray* array = FixedArray::cast(result); 2993 FixedArray* array = FixedArray::cast(result);
2979 array->set_length(length); 2994 array->set_length(length);
2980 // Initialize body. 2995 // Initialize body.
2981 Object* value = the_hole_value(); 2996 ASSERT(!Heap::InNewSpace(the_hole_value()));
2982 for (int index = 0; index < length; index++) { 2997 MemsetPointer(HeapObject::RawField(array, FixedArray::kHeaderSize),
2983 ASSERT(!Heap::InNewSpace(value)); // value = the hole 2998 the_hole_value(),
2984 array->set(index, value, SKIP_WRITE_BARRIER); 2999 length);
2985 }
2986 } 3000 }
2987 return result; 3001 return result;
2988 } 3002 }
2989 3003
2990 3004
2991 Object* Heap::AllocateHashTable(int length) { 3005 Object* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
2992 Object* result = Heap::AllocateFixedArray(length); 3006 Object* result = Heap::AllocateFixedArray(length, pretenure);
2993 if (result->IsFailure()) return result; 3007 if (result->IsFailure()) return result;
2994 reinterpret_cast<Array*>(result)->set_map(hash_table_map()); 3008 reinterpret_cast<Array*>(result)->set_map(hash_table_map());
2995 ASSERT(result->IsHashTable()); 3009 ASSERT(result->IsHashTable());
2996 return result; 3010 return result;
2997 } 3011 }
2998 3012
2999 3013
3000 Object* Heap::AllocateGlobalContext() { 3014 Object* Heap::AllocateGlobalContext() {
3001 Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS); 3015 Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
3002 if (result->IsFailure()) return result; 3016 if (result->IsFailure()) return result;
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
3065 } 3079 }
3066 3080
3067 3081
3068 bool Heap::IdleNotification() { 3082 bool Heap::IdleNotification() {
3069 static const int kIdlesBeforeScavenge = 4; 3083 static const int kIdlesBeforeScavenge = 4;
3070 static const int kIdlesBeforeMarkSweep = 7; 3084 static const int kIdlesBeforeMarkSweep = 7;
3071 static const int kIdlesBeforeMarkCompact = 8; 3085 static const int kIdlesBeforeMarkCompact = 8;
3072 static int number_idle_notifications = 0; 3086 static int number_idle_notifications = 0;
3073 static int last_gc_count = gc_count_; 3087 static int last_gc_count = gc_count_;
3074 3088
3089 bool uncommit = true;
3075 bool finished = false; 3090 bool finished = false;
3076 3091
3077 if (last_gc_count == gc_count_) { 3092 if (last_gc_count == gc_count_) {
3078 number_idle_notifications++; 3093 number_idle_notifications++;
3079 } else { 3094 } else {
3080 number_idle_notifications = 0; 3095 number_idle_notifications = 0;
3081 last_gc_count = gc_count_; 3096 last_gc_count = gc_count_;
3082 } 3097 }
3083 3098
3084 if (number_idle_notifications == kIdlesBeforeScavenge) { 3099 if (number_idle_notifications == kIdlesBeforeScavenge) {
3085 CollectGarbage(0, NEW_SPACE); 3100 if (contexts_disposed_ > 0) {
3101 HistogramTimerScope scope(&Counters::gc_context);
3102 CollectAllGarbage(false);
3103 } else {
3104 CollectGarbage(0, NEW_SPACE);
3105 }
3086 new_space_.Shrink(); 3106 new_space_.Shrink();
3087 last_gc_count = gc_count_; 3107 last_gc_count = gc_count_;
3088 3108
3089 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) { 3109 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
3090 // Before doing the mark-sweep collections we clear the 3110 // Before doing the mark-sweep collections we clear the
3091 // compilation cache to avoid hanging on to source code and 3111 // compilation cache to avoid hanging on to source code and
3092 // generated code for cached functions. 3112 // generated code for cached functions.
3093 CompilationCache::Clear(); 3113 CompilationCache::Clear();
3094 3114
3095 CollectAllGarbage(false); 3115 CollectAllGarbage(false);
3096 new_space_.Shrink(); 3116 new_space_.Shrink();
3097 last_gc_count = gc_count_; 3117 last_gc_count = gc_count_;
3098 3118
3099 } else if (number_idle_notifications == kIdlesBeforeMarkCompact) { 3119 } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
3100 CollectAllGarbage(true); 3120 CollectAllGarbage(true);
3101 new_space_.Shrink(); 3121 new_space_.Shrink();
3102 last_gc_count = gc_count_; 3122 last_gc_count = gc_count_;
3103 number_idle_notifications = 0; 3123 number_idle_notifications = 0;
3104 finished = true; 3124 finished = true;
3125
3126 } else if (contexts_disposed_ > 0) {
3127 if (FLAG_expose_gc) {
3128 contexts_disposed_ = 0;
3129 } else {
3130 HistogramTimerScope scope(&Counters::gc_context);
3131 CollectAllGarbage(false);
3132 last_gc_count = gc_count_;
3133 }
3134 // If this is the first idle notification, we reset the
3135 // notification count to avoid letting idle notifications for
3136 // context disposal garbage collections start a potentially too
3137 // aggressive idle GC cycle.
3138 if (number_idle_notifications <= 1) {
3139 number_idle_notifications = 0;
3140 uncommit = false;
3141 }
3105 } 3142 }
3106 3143
3107 // Uncommit unused memory in new space. 3144 // Make sure that we have no pending context disposals and
3108 Heap::UncommitFromSpace(); 3145 // conditionally uncommit from space.
3146 ASSERT(contexts_disposed_ == 0);
3147 if (uncommit) Heap::UncommitFromSpace();
3109 return finished; 3148 return finished;
3110 } 3149 }
3111 3150
3112 3151
3113 #ifdef DEBUG 3152 #ifdef DEBUG
3114 3153
3115 void Heap::Print() { 3154 void Heap::Print() {
3116 if (!HasBeenSetup()) return; 3155 if (!HasBeenSetup()) return;
3117 Top::PrintStack(); 3156 Top::PrintStack();
3118 AllSpaces spaces; 3157 AllSpaces spaces;
(...skipping 941 matching lines...) Expand 10 before | Expand all | Expand 10 after
4060 4099
4061 MarkRootVisitor root_visitor; 4100 MarkRootVisitor root_visitor;
4062 IterateRoots(&root_visitor, VISIT_ONLY_STRONG); 4101 IterateRoots(&root_visitor, VISIT_ONLY_STRONG);
4063 } 4102 }
4064 #endif 4103 #endif
4065 4104
4066 4105
4067 GCTracer::GCTracer() 4106 GCTracer::GCTracer()
4068 : start_time_(0.0), 4107 : start_time_(0.0),
4069 start_size_(0.0), 4108 start_size_(0.0),
4109 external_time_(0.0),
4070 gc_count_(0), 4110 gc_count_(0),
4071 full_gc_count_(0), 4111 full_gc_count_(0),
4072 is_compacting_(false), 4112 is_compacting_(false),
4073 marked_count_(0) { 4113 marked_count_(0) {
4074 // These two fields reflect the state of the previous full collection. 4114 // These two fields reflect the state of the previous full collection.
4075 // Set them before they are changed by the collector. 4115 // Set them before they are changed by the collector.
4076 previous_has_compacted_ = MarkCompactCollector::HasCompacted(); 4116 previous_has_compacted_ = MarkCompactCollector::HasCompacted();
4077 previous_marked_count_ = MarkCompactCollector::previous_marked_count(); 4117 previous_marked_count_ = MarkCompactCollector::previous_marked_count();
4078 if (!FLAG_trace_gc) return; 4118 if (!FLAG_trace_gc) return;
4079 start_time_ = OS::TimeCurrentMillis(); 4119 start_time_ = OS::TimeCurrentMillis();
4080 start_size_ = SizeOfHeapObjects(); 4120 start_size_ = SizeOfHeapObjects();
4081 } 4121 }
4082 4122
4083 4123
4084 GCTracer::~GCTracer() { 4124 GCTracer::~GCTracer() {
4085 if (!FLAG_trace_gc) return; 4125 if (!FLAG_trace_gc) return;
4086 // Printf ONE line iff flag is set. 4126 // Printf ONE line iff flag is set.
4087 PrintF("%s %.1f -> %.1f MB, %d ms.\n", 4127 int time = static_cast<int>(OS::TimeCurrentMillis() - start_time_);
4088 CollectorString(), 4128 int external_time = static_cast<int>(external_time_);
4089 start_size_, SizeOfHeapObjects(), 4129 PrintF("%s %.1f -> %.1f MB, ",
4090 static_cast<int>(OS::TimeCurrentMillis() - start_time_)); 4130 CollectorString(), start_size_, SizeOfHeapObjects());
4131 if (external_time > 0) PrintF("%d / ", external_time);
4132 PrintF("%d ms.\n", time);
4091 4133
4092 #if defined(ENABLE_LOGGING_AND_PROFILING) 4134 #if defined(ENABLE_LOGGING_AND_PROFILING)
4093 Heap::PrintShortHeapStatistics(); 4135 Heap::PrintShortHeapStatistics();
4094 #endif 4136 #endif
4095 } 4137 }
4096 4138
4097 4139
4098 const char* GCTracer::CollectorString() { 4140 const char* GCTracer::CollectorString() {
4099 switch (collector_) { 4141 switch (collector_) {
4100 case SCAVENGER: 4142 case SCAVENGER:
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
4219 void ExternalStringTable::TearDown() { 4261 void ExternalStringTable::TearDown() {
4220 new_space_strings_.Free(); 4262 new_space_strings_.Free();
4221 old_space_strings_.Free(); 4263 old_space_strings_.Free();
4222 } 4264 }
4223 4265
4224 4266
4225 List<Object*> ExternalStringTable::new_space_strings_; 4267 List<Object*> ExternalStringTable::new_space_strings_;
4226 List<Object*> ExternalStringTable::old_space_strings_; 4268 List<Object*> ExternalStringTable::old_space_strings_;
4227 4269
4228 } } // namespace v8::internal 4270 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698