Chromium Code Reviews

Unified Diff: src/heap.h

Issue 8139027: Version 3.6.5 (Closed)
Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' (created 9 years, 2 months ago)
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 14 matching lines...)
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 #ifndef V8_HEAP_H_
 #define V8_HEAP_H_

 #include <math.h>

 #include "allocation.h"
 #include "globals.h"
+#include "incremental-marking.h"
 #include "list.h"
 #include "mark-compact.h"
+#include "objects-visiting.h"
 #include "spaces.h"
 #include "splay-tree-inl.h"
+#include "store-buffer.h"
 #include "v8-counters.h"
+#include "v8globals.h"

 namespace v8 {
 namespace internal {

 // TODO(isolates): remove HEAP here
 #define HEAP (_inline_get_heap_())
 class Heap;
 inline Heap* _inline_get_heap_();


 // Defines all the roots in Heap.
 #define STRONG_ROOT_LIST(V) \
-  /* Put the byte array map early. We need it to be in place by the time */ \
-  /* the deserializer hits the next page, since it wants to put a byte */ \
-  /* array in the unused space at the end of the page. */ \
   V(Map, byte_array_map, ByteArrayMap) \
+  V(Map, free_space_map, FreeSpaceMap) \
   V(Map, one_pointer_filler_map, OnePointerFillerMap) \
   V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
   /* Cluster the most popular ones in a few cache lines here at the top. */ \
+  V(Smi, store_buffer_top, StoreBufferTop) \
   V(Object, undefined_value, UndefinedValue) \
   V(Object, the_hole_value, TheHoleValue) \
   V(Object, null_value, NullValue) \
   V(Object, true_value, TrueValue) \
   V(Object, false_value, FalseValue) \
   V(Object, arguments_marker, ArgumentsMarker) \
+  V(Object, frame_alignment_marker, FrameAlignmentMarker) \
   V(Map, heap_number_map, HeapNumberMap) \
   V(Map, global_context_map, GlobalContextMap) \
   V(Map, fixed_array_map, FixedArrayMap) \
   V(Map, serialized_scope_info_map, SerializedScopeInfoMap) \
   V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
   V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
   V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
   V(Map, meta_map, MetaMap) \
   V(Map, hash_table_map, HashTableMap) \
   V(Smi, stack_limit, StackLimit) \
(...skipping 41 matching lines...)
   V(Map, catch_context_map, CatchContextMap) \
   V(Map, with_context_map, WithContextMap) \
   V(Map, block_context_map, BlockContextMap) \
   V(Map, code_map, CodeMap) \
   V(Map, oddball_map, OddballMap) \
   V(Map, global_property_cell_map, GlobalPropertyCellMap) \
   V(Map, shared_function_info_map, SharedFunctionInfoMap) \
   V(Map, message_object_map, JSMessageObjectMap) \
   V(Map, foreign_map, ForeignMap) \
   V(Object, nan_value, NanValue) \
+  V(Object, infinity_value, InfinityValue) \
   V(Object, minus_zero_value, MinusZeroValue) \
   V(Map, neander_map, NeanderMap) \
   V(JSObject, message_listeners, MessageListeners) \
   V(Foreign, prototype_accessors, PrototypeAccessors) \
   V(NumberDictionary, code_stubs, CodeStubs) \
   V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
   V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
   V(Code, js_entry_code, JsEntryCode) \
   V(Code, js_construct_entry_code, JsConstructEntryCode) \
   V(FixedArray, natives_source_cache, NativesSourceCache) \
(...skipping 83 matching lines...)
   V(redeclaration_symbol, "redeclaration") \
   V(failure_symbol, "<failure>") \
   V(space_symbol, " ") \
   V(exec_symbol, "exec") \
   V(zero_symbol, "0") \
   V(global_eval_symbol, "GlobalEval") \
   V(identity_hash_symbol, "v8::IdentityHash") \
   V(closure_symbol, "(closure)") \
   V(use_strict, "use strict") \
   V(dot_symbol, ".") \
-  V(anonymous_function_symbol, "(anonymous function)")
+  V(anonymous_function_symbol, "(anonymous function)") \
+  V(infinity_symbol, "Infinity") \
+  V(minus_infinity_symbol, "-Infinity")

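The root lists above follow the X-macro pattern: a single list of V(type, name, CamelName) entries is expanded several times, each time with a different definition of V, to generate the RootListIndex enum and the typed accessors that appear further down in this file. A minimal self-contained sketch of the pattern (the three demo entries and all names below are invented for illustration, not V8's real roots):

    #include <cstdio>

    // Hypothetical three-entry root list, mirroring STRONG_ROOT_LIST's shape.
    #define DEMO_ROOT_LIST(V)      \
      V(int, answer, Answer)       \
      V(double, pi, Pi)            \
      V(char, initial, Initial)

    // Expand once to declare an index enum...
    enum RootIndex {
    #define ROOT_INDEX(type, name, camel) k##camel##RootIndex,
      DEMO_ROOT_LIST(ROOT_INDEX)
    #undef ROOT_INDEX
      kRootListLength
    };

    // ...and once more to emit one accessor plus one field per entry.
    #define ROOT_ACCESSOR(type, name, camel) \
      type name() { return name##_; }        \
      type name##_;
    struct DemoRoots {
      DEMO_ROOT_LIST(ROOT_ACCESSOR)
    };
    #undef ROOT_ACCESSOR

    int main() {
      DemoRoots roots = {42, 3.14159, 'v'};
      std::printf("%d entries, answer=%d\n", kRootListLength, roots.answer());
      return 0;
    }

The payoff is that adding one line to the list updates the enum and every generated accessor in lockstep.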
 // Forward declarations.
 class GCTracer;
 class HeapStats;
 class Isolate;
 class WeakObjectRetainer;


 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                       Object** pointer);

-typedef bool (*DirtyRegionCallback)(Heap* heap,
-                                    Address start,
-                                    Address end,
-                                    ObjectSlotCallback copy_object_func);
+class StoreBufferRebuilder {
+ public:
+  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
+      : store_buffer_(store_buffer) {
+  }
+
+  void Callback(MemoryChunk* page, StoreBufferEvent event);
+
+ private:
+  StoreBuffer* store_buffer_;
+
+  // We record in this variable how full the store buffer was when we started
+  // iterating over the current page, finding pointers to new space. If the
+  // store buffer overflows again we can exempt the page from the store buffer
+  // by rewinding to this point instead of having to search the store buffer.
+  Object*** start_of_current_page_;
+  // The current page we are scanning in the store buffer iterator.
+  MemoryChunk* current_page_;
+};
+


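A rough standalone model of the rewind trick described in the comment above; this is a deliberate simplification with stand-in types (MiniStoreBuffer, Page), not V8's StoreBuffer API. The buffer remembers how full it was when scanning of the current page began, and on overflow it rewinds to that mark and flags the page scan-on-scavenge rather than keeping (or searching for) its entries:

    #include <cstddef>
    #include <cstdio>

    const size_t kBufferCapacity = 4;  // tiny, to force overflow in the demo

    struct Page { bool scan_on_scavenge = false; };

    struct MiniStoreBuffer {
      void* slots[kBufferCapacity];
      size_t top = 0;
      size_t start_of_current_page = 0;  // buffer fill level when page began
      Page* current_page = nullptr;

      void StartPage(Page* page) {
        current_page = page;
        start_of_current_page = top;
      }

      void Record(void* slot) {
        // An exempted page will be re-scanned wholesale; skip its entries.
        if (current_page->scan_on_scavenge) return;
        if (top == kBufferCapacity) {
          // Overflow: drop everything recorded for this page by rewinding to
          // the mark, and let the scavenger visit the page directly instead.
          top = start_of_current_page;
          current_page->scan_on_scavenge = true;
          return;
        }
        slots[top++] = slot;
      }
    };

    int main() {
      MiniStoreBuffer buffer;
      Page page;
      buffer.StartPage(&page);
      int dummy[8];
      for (int i = 0; i < 8; i++) buffer.Record(&dummy[i]);
      std::printf("entries=%zu scan_on_scavenge=%d\n",
                  buffer.top, (int)page.scan_on_scavenge);
      return 0;
    }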
 // The all static Heap captures the interface to the global object heap.
 // All JavaScript contexts by this process share the same object heap.

 #ifdef DEBUG
 class HeapDebugUtils;
 #endif


 // A queue of objects promoted during scavenge. Each object is accompanied
 // by its size to avoid dereferencing a map pointer for scanning.
 class PromotionQueue {
  public:
   PromotionQueue() : front_(NULL), rear_(NULL) { }

   void Initialize(Address start_address) {
+    // Assumes that a NewSpacePage exactly fits a number of promotion queue
+    // entries (where each is a pair of intptr_t). This allows us to simplify
+    // the test for when to switch pages.
+    ASSERT((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize)
+           == 0);
+    ASSERT(NewSpacePage::IsAtEnd(start_address));
     front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
   }

-  bool is_empty() { return front_ <= rear_; }
+  bool is_empty() { return front_ == rear_; }

   inline void insert(HeapObject* target, int size);

   void remove(HeapObject** target, int* size) {
+    ASSERT(!is_empty());
+    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
+      NewSpacePage* front_page =
+          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
+      ASSERT(!front_page->prev_page()->is_anchor());
+      front_ =
+          reinterpret_cast<intptr_t*>(front_page->prev_page()->body_limit());
+    }
     *target = reinterpret_cast<HeapObject*>(*(--front_));
     *size = static_cast<int>(*(--front_));
     // Assert no underflow.
-    ASSERT(front_ >= rear_);
+    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
+                                reinterpret_cast<Address>(front_));
   }

  private:
-  // The front of the queue is higher in memory than the rear.
+  // The front of the queue is higher in the memory page chain than the rear.
   intptr_t* front_;
   intptr_t* rear_;

   DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
 };


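For intuition, here is a stripped-down, runnable model of the two-pointer queue above, assuming a flat array in place of new-space pages and void* in place of HeapObject* (so none of the page-hopping in remove() is modeled). Entries are (pointer, size) pairs, the queue grows downward from the end of its backing store, and the front (oldest entry) always sits at or above the rear:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // A flat 64-slot backing store stands in for the new-space page.
    const int kSlots = 64;
    intptr_t backing[kSlots];

    struct MiniPromotionQueue {
      intptr_t* front_;
      intptr_t* rear_;

      void Initialize(intptr_t* end) { front_ = rear_ = end; }
      bool is_empty() { return front_ == rear_; }

      // Push at the rear: the queue grows toward lower addresses.
      void insert(void* target, int size) {
        *(--rear_) = reinterpret_cast<intptr_t>(target);
        *(--rear_) = size;
      }

      // Pop at the front: the oldest pair sits at the highest addresses.
      void remove(void** target, int* size) {
        assert(!is_empty());
        *target = reinterpret_cast<void*>(*(--front_));
        *size = static_cast<int>(*(--front_));
        assert(front_ >= rear_);  // no underflow
      }
    };

    int main() {
      MiniPromotionQueue queue;
      queue.Initialize(backing + kSlots);

      int a = 1, b = 2;
      queue.insert(&a, sizeof(a));
      queue.insert(&b, sizeof(b));

      void* obj; int size;
      queue.remove(&obj, &size);  // FIFO: &a comes out first
      std::printf("first=%s size=%d\n", obj == &a ? "&a" : "&b", size);
      return 0;
    }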
+typedef void (*ScavengingCallback)(Map* map,
+                                   HeapObject** slot,
+                                   HeapObject* object);
+
+
 // External strings table is a place where all external strings are
 // registered. We need to keep track of such strings to properly
 // finalize them.
 class ExternalStringTable {
  public:
   // Registers an external string.
   inline void AddString(String* string);

   inline void Iterate(ObjectVisitor* v);

(...skipping 25 matching lines...)
 
   DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
 };


 class Heap {
  public:
   // Configure heap size before setup. Return false if the heap has been
   // setup already.
   bool ConfigureHeap(int max_semispace_size,
-                     int max_old_gen_size,
-                     int max_executable_size);
+                     intptr_t max_old_gen_size,
+                     intptr_t max_executable_size);
   bool ConfigureHeapDefault();

   // Initializes the global object heap. If create_heap_objects is true,
   // also creates the basic non-mutable objects.
   // Returns whether it succeeded.
   bool Setup(bool create_heap_objects);

   // Destroys all memory allocated by the heap.
   void TearDown();

(...skipping 107 matching lines...)
 
   MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
                                                        Object* call_trap,
                                                        Object* construct_trap,
                                                        Object* prototype);

   // Reinitialize a JSReceiver into an (empty) JS object of respective type and
   // size, but keeping the original prototype. The receiver must have at least
   // the size of the new object. The object is reinitialized and behaves as an
   // object that has been freshly allocated.
+  // Returns failure if an error occurred, otherwise object.
   MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
                                                       InstanceType type,
                                                       int size);

   // Reinitialize a JSGlobalProxy based on a constructor. The object
   // must have the same size as objects allocated using the
   // constructor. The object is reinitialized and behaves as an
   // object that has been freshly allocated using the constructor.
   MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
       JSFunction* constructor, JSGlobalProxy* global);

   // Allocates and initializes a new JavaScript object based on a map.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
   MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
       Map* map, PretenureFlag pretenure = NOT_TENURED);

   // Allocates a heap object based on the map.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
   MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);

   // Allocates a JS Map in the heap.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
-  MUST_USE_RESULT MaybeObject* AllocateMap(InstanceType instance_type,
-                                           int instance_size);
+  MUST_USE_RESULT MaybeObject* AllocateMap(
+      InstanceType instance_type,
+      int instance_size,
+      ElementsKind elements_kind = FAST_ELEMENTS);

   // Allocates a partial map for bootstrapping.
   MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                   int instance_size);

   // Allocate a map for the specified function
   MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);

   // Allocates an empty code cache.
   MUST_USE_RESULT MaybeObject* AllocateCodeCache();
(...skipping 290 matching lines...)
       int start,
       int end,
       PretenureFlag pretenure = NOT_TENURED);

   // Allocate a new external string object, which is backed by a string
   // resource that resides outside the V8 heap.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this does not perform a garbage collection.
   MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
-      ExternalAsciiString::Resource* resource);
+      const ExternalAsciiString::Resource* resource);
   MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
-      ExternalTwoByteString::Resource* resource);
+      const ExternalTwoByteString::Resource* resource);

   // Finalizes an external string by deleting the associated external
   // data and clearing the resource pointer.
   inline void FinalizeExternalString(String* string);

   // Allocates an uninitialized object. The memory is non-executable if the
   // hardware and OS allow.
   // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
   // failed.
   // Please note this function does not perform a garbage collection.
(...skipping 66 matching lines...)
   // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   bool CollectGarbage(AllocationSpace space, GarbageCollector collector);

   // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   inline bool CollectGarbage(AllocationSpace space);

-  // Performs a full garbage collection. Force compaction if the
-  // parameter is true.
-  void CollectAllGarbage(bool force_compaction);
+  static const int kNoGCFlags = 0;
+  static const int kMakeHeapIterableMask = 1;
+
+  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
+  // non-zero, then the slower precise sweeper is used, which leaves the heap
+  // in a state where we can iterate over the heap visiting all objects.
+  void CollectAllGarbage(int flags);

   // Last hope GC, should try to squeeze as much as possible.
   void CollectAllAvailableGarbage();

+  // Check whether the heap is currently iterable.
+  bool IsHeapIterable();
+
+  // Ensure that we have swept all spaces in such a way that we can iterate
+  // over all objects. May cause a GC.
+  void EnsureHeapIsIterable();
+
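The new GC flags form a plain bitmask, with kMakeHeapIterableMask occupying bit 0; a caller wanting an iterable heap would pass it instead of kNoGCFlags. A minimal sketch of the flag test (the function body is a stand-in, only the flag handling mirrors the header):

    #include <cstdio>

    static const int kNoGCFlags = 0;
    static const int kMakeHeapIterableMask = 1;

    // Stand-in for the collector entry point: only the flag logic is real.
    void CollectAllGarbage(int flags) {
      bool precise = (flags & kMakeHeapIterableMask) != 0;
      // A precise sweep leaves no unswept pages, so heap iteration is safe.
      std::printf("sweeper: %s\n", precise ? "precise" : "lazy");
    }

    int main() {
      CollectAllGarbage(kNoGCFlags);             // ordinary full GC
      CollectAllGarbage(kMakeHeapIterableMask);  // full GC + iterable heap
      return 0;
    }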
   // Notify the heap that a context has been disposed.
   int NotifyContextDisposed() { return ++contexts_disposed_; }

   // Utility to invoke the scavenger. This is needed in test code to
   // ensure correct callback for weak global handles.
   void PerformScavenge();

+  inline void increment_scan_on_scavenge_pages() {
+    scan_on_scavenge_pages_++;
+    if (FLAG_gc_verbose) {
+      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
+    }
+  }
+
+  inline void decrement_scan_on_scavenge_pages() {
+    scan_on_scavenge_pages_--;
+    if (FLAG_gc_verbose) {
+      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
+    }
+  }
+
   PromotionQueue* promotion_queue() { return &promotion_queue_; }

 #ifdef DEBUG
   // Utility used with flag gc-greedy.
   void GarbageCollectionGreedyCheck();
 #endif

   void AddGCPrologueCallback(
       GCEpilogueCallback callback, GCType gc_type_filter);
   void RemoveGCPrologueCallback(GCEpilogueCallback callback);

   void AddGCEpilogueCallback(
       GCEpilogueCallback callback, GCType gc_type_filter);
   void RemoveGCEpilogueCallback(GCEpilogueCallback callback);

   void SetGlobalGCPrologueCallback(GCCallback callback) {
     ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
     global_gc_prologue_callback_ = callback;
   }
   void SetGlobalGCEpilogueCallback(GCCallback callback) {
     ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
     global_gc_epilogue_callback_ = callback;
   }

   // Heap root getters. We have versions with and without type::cast() here.
   // You can't use type::cast during GC because the assert fails.
+  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
+  // not corrupt the stack.
 #define ROOT_ACCESSOR(type, name, camel_name) \
   type* name() { \
     return type::cast(roots_[k##camel_name##RootIndex]); \
   } \
   type* raw_unchecked_##name() { \
     return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
   }
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR

(...skipping 20 matching lines...)
   }
   Object* global_contexts_list() { return global_contexts_list_; }

   // Iterates over all roots in the heap.
   void IterateRoots(ObjectVisitor* v, VisitMode mode);
   // Iterates over all strong roots in the heap.
   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
   // Iterates over all the other roots in the heap.
   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

-  enum ExpectedPageWatermarkState {
-    WATERMARK_SHOULD_BE_VALID,
-    WATERMARK_CAN_BE_INVALID
-  };
-
-  // For each dirty region on a page in use from an old space call
-  // visit_dirty_region callback.
-  // If either visit_dirty_region or callback can cause an allocation
-  // in old space and changes in allocation watermark then
-  // can_preallocate_during_iteration should be set to true.
-  // All pages will be marked as having invalid watermark upon
-  // iteration completion.
-  void IterateDirtyRegions(
-      PagedSpace* space,
-      DirtyRegionCallback visit_dirty_region,
-      ObjectSlotCallback callback,
-      ExpectedPageWatermarkState expected_page_watermark_state);
-
-  // Interpret marks as a bitvector of dirty marks for regions of size
-  // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
-  // memory interval from start to top. For each dirty region call a
-  // visit_dirty_region callback. Return updated bitvector of dirty marks.
-  uint32_t IterateDirtyRegions(uint32_t marks,
-                               Address start,
-                               Address end,
-                               DirtyRegionCallback visit_dirty_region,
-                               ObjectSlotCallback callback);
-
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end.
-  // Update dirty marks for page containing start address.
   void IterateAndMarkPointersToFromSpace(Address start,
                                          Address end,
                                          ObjectSlotCallback callback);

-  // Iterate pointers to new space found in memory interval from start to end.
-  // Return true if pointers to new space was found.
-  static bool IteratePointersInDirtyRegion(Heap* heap,
-                                           Address start,
-                                           Address end,
-                                           ObjectSlotCallback callback);
-
-
-  // Iterate pointers to new space found in memory interval from start to end.
-  // This interval is considered to belong to the map space.
-  // Return true if pointers to new space was found.
-  static bool IteratePointersInDirtyMapsRegion(Heap* heap,
-                                               Address start,
-                                               Address end,
-                                               ObjectSlotCallback callback);
-
-
   // Returns whether the object resides in new space.
   inline bool InNewSpace(Object* object);
+  inline bool InNewSpace(Address addr);
+  inline bool InNewSpacePage(Address addr);
   inline bool InFromSpace(Object* object);
   inline bool InToSpace(Object* object);

   // Checks whether an address/object in the heap (including auxiliary
   // area and unused area).
   bool Contains(Address addr);
   bool Contains(HeapObject* value);

   // Checks whether an address/object in a space.
   // Currently used by tests, serialization and heap verification only.
(...skipping 18 matching lines...)
 
   // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
   void public_set_non_monomorphic_cache(NumberDictionary* value) {
     roots_[kNonMonomorphicCacheRootIndex] = value;
   }

   void public_set_empty_script(Script* script) {
     roots_[kEmptyScriptRootIndex] = script;
   }

+  void public_set_store_buffer_top(Address* top) {
+    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
+  }
+
   // Update the next script id.
   inline void SetLastScriptId(Object* last_script_id);

   // Generated code can embed this address to get access to the roots.
   Object** roots_address() { return roots_; }

+  Address* store_buffer_top_address() {
+    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
+  }
+
   // Get address of global contexts list for serialization support.
   Object** global_contexts_list_address() {
     return &global_contexts_list_;
   }

 #ifdef DEBUG
   void Print();
   void PrintHandles();

   // Verify the heap is in its normal state before or after a GC.
   void Verify();

+  void OldPointerSpaceCheckStoreBuffer();
+  void MapSpaceCheckStoreBuffer();
+  void LargeObjectSpaceCheckStoreBuffer();
+
   // Report heap statistics.
   void ReportHeapStatistics(const char* title);
   void ReportCodeStatistics(const char* title);

   // Fill in bogus values in from space
   void ZapFromSpace();
 #endif

   // Print short heap statistics.
   void PrintShortHeapStatistics();
(...skipping 75 matching lines...)
 
   // Adjusts the amount of registered external memory.
   // Returns the adjusted value.
   inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);

   // Allocate uninitialized fixed array.
   MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
   MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
                                                      PretenureFlag pretenure);

-  // True if we have reached the allocation limit in the old generation that
-  // should force the next GC (caused normally) to be a full one.
-  bool OldGenerationPromotionLimitReached() {
-    return (PromotedSpaceSize() + PromotedExternalMemorySize())
-           > old_gen_promotion_limit_;
-  }
-
-  intptr_t OldGenerationSpaceAvailable() {
-    return old_gen_allocation_limit_ -
-           (PromotedSpaceSize() + PromotedExternalMemorySize());
-  }
-
-  // True if we have reached the allocation limit in the old generation that
-  // should artificially cause a GC right now.
-  bool OldGenerationAllocationLimitReached() {
-    return OldGenerationSpaceAvailable() < 0;
+  inline intptr_t PromotedTotalSize() {
+    return PromotedSpaceSize() + PromotedExternalMemorySize();
+  }
+
+  // True if we have reached the allocation limit in the old generation that
+  // should force the next GC (caused normally) to be a full one.
+  inline bool OldGenerationPromotionLimitReached() {
+    return PromotedTotalSize() > old_gen_promotion_limit_;
+  }
+
+  inline intptr_t OldGenerationSpaceAvailable() {
+    return old_gen_allocation_limit_ - PromotedTotalSize();
+  }
+
+  static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
+  static const intptr_t kMinimumAllocationLimit =
+      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
+
+  // When we sweep lazily we initially guess that there is no garbage on the
+  // heap and set the limits for the next GC accordingly. As we sweep we find
+  // out that some of the pages contained garbage and we have to adjust
+  // downwards the size of the heap. This means the limits that control the
+  // timing of the next GC also need to be adjusted downwards.
+  void LowerOldGenLimits(intptr_t adjustment) {
+    size_of_old_gen_at_last_old_space_gc_ -= adjustment;
+    old_gen_promotion_limit_ =
+        OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_);
+    old_gen_allocation_limit_ =
+        OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_);
+  }
+
+  intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
+    intptr_t limit =
+        Max(old_gen_size + old_gen_size / 3, kMinimumPromotionLimit);
+    limit += new_space_.Capacity();
+    limit *= old_gen_limit_factor_;
+    return limit;
+  }
+
+  intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
+    intptr_t limit =
+        Max(old_gen_size + old_gen_size / 2, kMinimumAllocationLimit);
+    limit += new_space_.Capacity();
+    limit *= old_gen_limit_factor_;
+    return limit;
   }
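To make the growth policy concrete, here is a standalone sketch of the two limit formulas with made-up stand-in values for Page::kPageSize, the new-space capacity, and the limit factor (the real values live on the Heap instance): the promotion limit grants roughly one-third headroom over the old-generation size measured at the last full GC, the allocation limit roughly one-half, and both are padded by the new-space capacity so a full scavenge cannot immediately trip them.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    const intptr_t MB = 1024 * 1024;
    const intptr_t kPageSize = 1 * MB;  // stand-in for Page::kPageSize
    const intptr_t kMinimumPromotionLimit = 5 * kPageSize;
    const intptr_t kMinimumAllocationLimit = 8 * std::max(kPageSize, MB);

    // Mirrors Heap::OldGenPromotionLimit: ~1/3 headroom over the old-gen
    // size at the last full GC, plus room to promote the whole new space.
    intptr_t OldGenPromotionLimit(intptr_t old_gen_size,
                                  intptr_t new_space_capacity,
                                  intptr_t limit_factor) {
      intptr_t limit =
          std::max(old_gen_size + old_gen_size / 3, kMinimumPromotionLimit);
      limit += new_space_capacity;
      limit *= limit_factor;
      return limit;
    }

    // Mirrors Heap::OldGenAllocationLimit: ~1/2 headroom instead of 1/3.
    intptr_t OldGenAllocationLimit(intptr_t old_gen_size,
                                   intptr_t new_space_capacity,
                                   intptr_t limit_factor) {
      intptr_t limit =
          std::max(old_gen_size + old_gen_size / 2, kMinimumAllocationLimit);
      limit += new_space_capacity;
      limit *= limit_factor;
      return limit;
    }

    int main() {
      // E.g. 60 MB old generation, 8 MB new space, factor 1:
      // promotion = 60 + 20 + 8 = 88 MB; allocation = 60 + 30 + 8 = 98 MB.
      std::printf("promotion:  %lld MB\n",
                  (long long)(OldGenPromotionLimit(60 * MB, 8 * MB, 1) / MB));
      std::printf("allocation: %lld MB\n",
                  (long long)(OldGenAllocationLimit(60 * MB, 8 * MB, 1) / MB));
      return 0;
    }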
 
   // Can be called when the embedding application is idle.
   bool IdleNotification();

   // Declare all the root indices.
   enum RootListIndex {
 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
 #undef ROOT_INDEX_DECLARATION

     // Utility type maps
 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
     STRUCT_LIST(DECLARE_STRUCT_MAP)
 #undef DECLARE_STRUCT_MAP

 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
     SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
 #undef SYMBOL_DECLARATION

     kSymbolTableRootIndex,
     kStrongRootListLength = kSymbolTableRootIndex,
     kRootListLength
   };

   MUST_USE_RESULT MaybeObject* NumberToString(
       Object* number, bool check_number_string_cache = true);
+  MUST_USE_RESULT MaybeObject* Uint32ToString(
+      uint32_t value, bool check_number_string_cache = true);

   Map* MapForExternalArrayType(ExternalArrayType array_type);
   RootListIndex RootIndexForExternalArrayType(
       ExternalArrayType array_type);

   void RecordStats(HeapStats* stats, bool take_snapshot = false);

   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);

-  inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                      Address src,
-                                                      int byte_size);
-
   // Optimized version of memmove for blocks with pointer size aligned sizes and
   // pointer size aligned addresses.
   static inline void MoveBlock(Address dst, Address src, int byte_size);

-  inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
-                                                      Address src,
-                                                      int byte_size);
-
   // Check new space expansion criteria and expand semispaces if it was hit.
   void CheckNewSpaceExpansionCriteria();

   inline void IncrementYoungSurvivorsCounter(int survived) {
     young_survivors_after_last_gc_ = survived;
     survived_since_last_expansion_ += survived;
   }

+  inline bool NextGCIsLikelyToBeFull() {
+    if (FLAG_gc_global) return true;
+
+    intptr_t total_promoted = PromotedTotalSize();
+
+    intptr_t adjusted_promotion_limit =
+        old_gen_promotion_limit_ - new_space_.Capacity();
+
+    if (total_promoted >= adjusted_promotion_limit) return true;
+
+    intptr_t adjusted_allocation_limit =
+        old_gen_allocation_limit_ - new_space_.Capacity() / 5;
+
+    if (PromotedSpaceSize() >= adjusted_allocation_limit) return true;
+
+    return false;
+  }
+
+
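A worked example of the heuristic above, reusing the stand-in numbers from the earlier sketch (88 MB promotion limit, 98 MB allocation limit, 8 MB new space): NextGCIsLikelyToBeFull() answers true once total promoted size reaches 88 - 8 = 80 MB, or once promoted space alone reaches 98 - 8/5 = 96.4 MB. Subtracting the new-space capacity (or a fifth of it) anticipates that the next scavenge may promote much of the new space, in which case the unadjusted limit would be crossed during or right after that GC.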
   void UpdateNewSpaceReferencesInExternalStringTable(
       ExternalStringTableUpdaterCallback updater_func);

+  void UpdateReferencesInExternalStringTable(
+      ExternalStringTableUpdaterCallback updater_func);
+
   void ProcessWeakReferences(WeakObjectRetainer* retainer);

   // Helper function that governs the promotion policy from new space to
   // old. If the object's old address lies below the new space's age
   // mark or if we've already filled the bottom 1/16th of the to space,
   // we try to promote this object.
   inline bool ShouldBePromoted(Address old_address, int object_size);

   int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }

   void ClearJSFunctionResultCaches();

   void ClearNormalizedMapCaches();

   GCTracer* tracer() { return tracer_; }

+  // Returns the size of objects residing in non new spaces.
+  intptr_t PromotedSpaceSize();
+
   double total_regexp_code_generated() { return total_regexp_code_generated_; }
   void IncreaseTotalRegexpCodeGenerated(int size) {
     total_regexp_code_generated_ += size;
   }

   // Returns maximum GC pause.
   int get_max_gc_pause() { return max_gc_pause_; }

   // Returns maximum size of objects alive after GC.
   intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

   // Returns minimal interval between two subsequent collections.
   int get_min_in_mutator() { return min_in_mutator_; }

   MarkCompactCollector* mark_compact_collector() {
     return &mark_compact_collector_;
   }

+  StoreBuffer* store_buffer() {
+    return &store_buffer_;
+  }
+
+  Marking* marking() {
+    return &marking_;
+  }
+
+  IncrementalMarking* incremental_marking() {
+    return &incremental_marking_;
+  }
+
   ExternalStringTable* external_string_table() {
     return &external_string_table_;
   }

   // Returns the current sweep generation.
   int sweep_generation() {
     return sweep_generation_;
   }

   inline Isolate* isolate();
-  bool is_safe_to_read_maps() { return is_safe_to_read_maps_; }

-  void CallGlobalGCPrologueCallback() {
+  inline void CallGlobalGCPrologueCallback() {
     if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
   }

-  void CallGlobalGCEpilogueCallback() {
+  inline void CallGlobalGCEpilogueCallback() {
     if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
   }

+  inline bool OldGenerationAllocationLimitReached();
+
+  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
+    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
+  }
+
+  void QueueMemoryChunkForFree(MemoryChunk* chunk);
+  void FreeQueuedChunks();
+
+  // Completely clear the Instanceof cache (to stop it keeping objects alive
+  // around a GC).
+  inline void CompletelyClearInstanceofCache();
+
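DoScavengeObject above dispatches through a table of per-map visitor functions rather than a switch, so the hot scavenge path costs one table load and one indirect call. A self-contained sketch of that shape (an enum and plain function pointers stand in for Map keys and the real scavenging_visitors_table_):

    #include <cstdio>

    // Stand-ins: in V8 the key is the object's Map; here a small enum
    // indexes plain function pointers, which is the essence of the dispatch.
    enum MapKind { kByteArray, kFixedArray, kMapKindCount };

    typedef void (*ScavengingCallback)(int* slot, int object);

    void VisitByteArray(int* slot, int object)  { *slot = object + 1; }
    void VisitFixedArray(int* slot, int object) { *slot = object + 2; }

    ScavengingCallback visitors[kMapKindCount] = {
      VisitByteArray,
      VisitFixedArray,
    };

    void DoScavengeObject(MapKind kind, int* slot, int object) {
      visitors[kind](slot, object);  // one indirect call, no switch
    }

    int main() {
      int slot = 0;
      DoScavengeObject(kFixedArray, &slot, 40);
      std::printf("slot=%d\n", slot);  // prints slot=42
      return 0;
    }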
  private:
   Heap();

   // This can be calculated directly from a pointer to the heap; however, it is
   // more expedient to get at the isolate directly from within Heap methods.
   Isolate* isolate_;

+  intptr_t code_range_size_;
   int reserved_semispace_size_;
   int max_semispace_size_;
   int initial_semispace_size_;
   intptr_t max_old_generation_size_;
   intptr_t max_executable_size_;
-  intptr_t code_range_size_;

   // For keeping track of how much data has survived
   // scavenge since last new space expansion.
   int survived_since_last_expansion_;

   // For keeping track on when to flush RegExp code.
   int sweep_generation_;

   int always_allocate_scope_depth_;
   int linear_allocation_scope_depth_;

   // For keeping track of context disposals.
   int contexts_disposed_;

+  int scan_on_scavenge_pages_;
+
 #if defined(V8_TARGET_ARCH_X64)
   static const int kMaxObjectSizeInNewSpace = 1024*KB;
 #else
   static const int kMaxObjectSizeInNewSpace = 512*KB;
 #endif

   NewSpace new_space_;
   OldSpace* old_pointer_space_;
   OldSpace* old_data_space_;
   OldSpace* code_space_;
   MapSpace* map_space_;
   CellSpace* cell_space_;
   LargeObjectSpace* lo_space_;
   HeapState gc_state_;
   int gc_post_processing_depth_;

-  // Returns the size of object residing in non new spaces.
-  intptr_t PromotedSpaceSize();
-
   // Returns the amount of external memory registered since last global gc.
   int PromotedExternalMemorySize();

-  int mc_count_;  // how many mark-compact collections happened
   int ms_count_;  // how many mark-sweep collections happened
   unsigned int gc_count_;  // how many gc happened

   // Total length of the strings we failed to flatten since the last GC.
   int unflattened_strings_length_;

 #define ROOT_ACCESSOR(type, name, camel_name) \
   inline void set_##name(type* value) { \
     roots_[k##camel_name##RootIndex] = value; \
   }
(...skipping 18 matching lines...)
   // Limit that triggers a global GC on the next (normally caused) GC. This
   // is checked when we have already decided to do a GC to help determine
   // which collector to invoke.
   intptr_t old_gen_promotion_limit_;

   // Limit that triggers a global GC as soon as is reasonable. This is
   // checked before expanding a paged space in the old generation and on
   // every allocation in large object space.
   intptr_t old_gen_allocation_limit_;

+  // Sometimes the heuristics dictate that those limits are increased. This
+  // variable records that fact.
+  int old_gen_limit_factor_;
+
+  // Used to adjust the limits that control the timing of the next GC.
+  intptr_t size_of_old_gen_at_last_old_space_gc_;
+
   // Limit on the amount of externally allocated memory allowed
   // between global GCs. If reached a global GC is forced.
   intptr_t external_allocation_limit_;

   // The amount of external memory registered through the API kept alive
   // by global handles
   int amount_of_external_allocated_memory_;

   // Caches the amount of external memory registered at the last global gc.
   int amount_of_external_allocated_memory_at_last_global_gc_;

   // Indicates that an allocation has failed in the old generation since the
   // last GC.
   int old_gen_exhausted_;

   Object* roots_[kRootListLength];

   Object* global_contexts_list_;

+  StoreBufferRebuilder store_buffer_rebuilder_;
+
   struct StringTypeTable {
     InstanceType type;
     int size;
     RootListIndex index;
   };

   struct ConstantSymbolTable {
     const char* contents;
     RootListIndex index;
   };
(...skipping 37 matching lines...)
     GCType gc_type;
   };
   List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

   GCCallback global_gc_prologue_callback_;
   GCCallback global_gc_epilogue_callback_;

   // Support for computing object sizes during GC.
   HeapObjectCallback gc_safe_size_of_old_object_;
   static int GcSafeSizeOfOldObject(HeapObject* object);
-  static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object);

   // Update the GC state. Called from the mark-compact collector.
   void MarkMapPointersAsEncoded(bool encoded) {
-    gc_safe_size_of_old_object_ = encoded
-        ? &GcSafeSizeOfOldObjectWithEncodedMap
-        : &GcSafeSizeOfOldObject;
+    ASSERT(!encoded);
+    gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
   }

   // Checks whether a global GC is necessary
   GarbageCollector SelectGarbageCollector(AllocationSpace space);

   // Performs garbage collection
   // Returns whether there is a chance another major GC could
   // collect more garbage.
   bool PerformGarbageCollection(GarbageCollector collector,
                                 GCTracer* tracer);

-  static const intptr_t kMinimumPromotionLimit = 2 * MB;
-  static const intptr_t kMinimumAllocationLimit = 8 * MB;

   inline void UpdateOldSpaceLimits();

+
   // Allocate an uninitialized object in map space. The behavior is identical
   // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
   // have to test the allocation space argument and (b) can reduce code size
   // (since both AllocateRaw and AllocateRawMap are inlined).
   MUST_USE_RESULT inline MaybeObject* AllocateRawMap();

   // Allocate an uninitialized object in the global property cell space.
   MUST_USE_RESULT inline MaybeObject* AllocateRawCell();

   // Initializes a JSObject based on its map.
(...skipping 14 matching lines...)
1515 MaybeObject* CreateOddball(const char* to_string, 1632 MaybeObject* CreateOddball(const char* to_string,
1516 Object* to_number, 1633 Object* to_number,
1517 byte kind); 1634 byte kind);
1518 1635
1519 // Allocate empty fixed array. 1636 // Allocate empty fixed array.
1520 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray(); 1637 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1521 1638
1522 // Allocate empty fixed double array. 1639 // Allocate empty fixed double array.
1523 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray(); 1640 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
1524 1641
1525 void SwitchScavengingVisitorsTableIfProfilingWasEnabled();
1526
1527 // Performs a minor collection in new generation. 1642 // Performs a minor collection in new generation.
1528 void Scavenge(); 1643 void Scavenge();
1529 1644
1530 static String* UpdateNewSpaceReferenceInExternalStringTableEntry( 1645 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1531 Heap* heap, 1646 Heap* heap,
1532 Object** pointer); 1647 Object** pointer);
1533 1648
1534 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front); 1649 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1650 static void ScavengeStoreBufferCallback(Heap* heap,
1651 MemoryChunk* page,
1652 StoreBufferEvent event);
1535 1653
1536 // Performs a major collection in the whole heap. 1654 // Performs a major collection in the whole heap.
1537 void MarkCompact(GCTracer* tracer); 1655 void MarkCompact(GCTracer* tracer);
1538 1656
1539 // Code to be run before and after mark-compact. 1657 // Code to be run before and after mark-compact.
1540 void MarkCompactPrologue(bool is_compacting); 1658 void MarkCompactPrologue();
1541
1542 // Completely clear the Instanceof cache (to stop it from keeping objects
1543 // alive across a GC).
1544 inline void CompletelyClearInstanceofCache();
1545 1659
1546 // Record statistics before and after garbage collection. 1660 // Record statistics before and after garbage collection.
1547 void ReportStatisticsBeforeGC(); 1661 void ReportStatisticsBeforeGC();
1548 void ReportStatisticsAfterGC(); 1662 void ReportStatisticsAfterGC();
1549 1663
1550 // Slow part of scavenge object. 1664 // Slow part of scavenge object.
1551 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); 1665 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1552 1666
1553 // Initializes a function with a shared part and prototype. 1667 // Initializes a function with a shared part and prototype.
1554 // Returns the function.
1555 // Note: this code was factored out of AllocateFunction such that 1668 // Note: this code was factored out of AllocateFunction such that
1556 // other parts of the VM could use it. Specifically, a function that creates 1669 // other parts of the VM could use it. Specifically, a function that creates
1557 // instances of type JS_FUNCTION_TYPE benefits from the use of this function. 1670 // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
1558 // Please note this does not perform a garbage collection. 1671 // Please note this does not perform a garbage collection.
1559 MUST_USE_RESULT inline MaybeObject* InitializeFunction( 1672 inline void InitializeFunction(
1560 JSFunction* function, 1673 JSFunction* function,
1561 SharedFunctionInfo* shared, 1674 SharedFunctionInfo* shared,
1562 Object* prototype); 1675 Object* prototype);
1563 1676
1564 // Total RegExp code ever generated 1677 // Total RegExp code ever generated
1565 double total_regexp_code_generated_; 1678 double total_regexp_code_generated_;
1566 1679
1567 GCTracer* tracer_; 1680 GCTracer* tracer_;
1568 1681
1569 1682
(...skipping 44 matching lines...)
1614 } 1727 }
1615 1728
1616 bool IsIncreasingSurvivalTrend() { 1729 bool IsIncreasingSurvivalTrend() {
1617 return survival_rate_trend() == INCREASING; 1730 return survival_rate_trend() == INCREASING;
1618 } 1731 }
1619 1732
1620 bool IsHighSurvivalRate() { 1733 bool IsHighSurvivalRate() {
1621 return high_survival_rate_period_length_ > 0; 1734 return high_survival_rate_period_length_ > 0;
1622 } 1735 }
1623 1736
1737 void SelectScavengingVisitorsTable();
1738
1624 static const int kInitialSymbolTableSize = 2048; 1739 static const int kInitialSymbolTableSize = 2048;
1625 static const int kInitialEvalCacheSize = 64; 1740 static const int kInitialEvalCacheSize = 64;
1626 1741
1627 // Maximum GC pause. 1742 // Maximum GC pause.
1628 int max_gc_pause_; 1743 int max_gc_pause_;
1629 1744
1630 // Maximum size of objects alive after GC. 1745 // Maximum size of objects alive after GC.
1631 intptr_t max_alive_after_gc_; 1746 intptr_t max_alive_after_gc_;
1632 1747
1633 // Minimum interval between two consecutive collections. 1748 // Minimum interval between two consecutive collections.
1634 int min_in_mutator_; 1749 int min_in_mutator_;
1635 1750
1636 // Size of objects alive after last GC. 1751 // Size of objects alive after last GC.
1637 intptr_t alive_after_last_gc_; 1752 intptr_t alive_after_last_gc_;
1638 1753
1639 double last_gc_end_timestamp_; 1754 double last_gc_end_timestamp_;
1640 1755
1641 MarkCompactCollector mark_compact_collector_; 1756 MarkCompactCollector mark_compact_collector_;
1642 1757
1643 // This field contains the meaning of the WATERMARK_INVALIDATED flag. 1758 StoreBuffer store_buffer_;
1644 // Instead of clearing this flag from all pages we just flip 1759
1645 // its meaning at the beginning of a scavenge. 1760 Marking marking_;
1646 intptr_t page_watermark_invalidated_mark_; 1761
1762 IncrementalMarking incremental_marking_;
1647 1763
1648 int number_idle_notifications_; 1764 int number_idle_notifications_;
1649 unsigned int last_idle_notification_gc_count_; 1765 unsigned int last_idle_notification_gc_count_;
1650 bool last_idle_notification_gc_count_init_; 1766 bool last_idle_notification_gc_count_init_;
1651 1767
1652 // Shared state read by the scavenge collector and set by ScavengeObject. 1768 // Shared state read by the scavenge collector and set by ScavengeObject.
1653 PromotionQueue promotion_queue_; 1769 PromotionQueue promotion_queue_;
1654 1770
1655 // Flag is set when the heap has been configured. The heap can be repeatedly 1771 // Flag is set when the heap has been configured. The heap can be repeatedly
1656 // configured through the API until it is set up. 1772 // configured through the API until it is set up.
1657 bool configured_; 1773 bool configured_;
1658 1774
1659 ExternalStringTable external_string_table_; 1775 ExternalStringTable external_string_table_;
1660 1776
1661 bool is_safe_to_read_maps_; 1777 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
1778
1779 MemoryChunk* chunks_queued_for_free_;
1662 1780
1663 friend class Factory; 1781 friend class Factory;
1664 friend class GCTracer; 1782 friend class GCTracer;
1665 friend class DisallowAllocationFailure; 1783 friend class DisallowAllocationFailure;
1666 friend class AlwaysAllocateScope; 1784 friend class AlwaysAllocateScope;
1667 friend class LinearAllocationScope; 1785 friend class LinearAllocationScope;
1668 friend class Page; 1786 friend class Page;
1669 friend class Isolate; 1787 friend class Isolate;
1670 friend class MarkCompactCollector; 1788 friend class MarkCompactCollector;
1671 friend class StaticMarkingVisitor; 1789 friend class StaticMarkingVisitor;
(...skipping 78 matching lines...)
1750 void VisitPointers(Object** start, Object** end) { 1868 void VisitPointers(Object** start, Object** end) {
1751 for (Object** current = start; current < end; current++) { 1869 for (Object** current = start; current < end; current++) {
1752 if ((*current)->IsHeapObject()) { 1870 if ((*current)->IsHeapObject()) {
1753 HeapObject* object = HeapObject::cast(*current); 1871 HeapObject* object = HeapObject::cast(*current);
1754 ASSERT(HEAP->Contains(object)); 1872 ASSERT(HEAP->Contains(object));
1755 ASSERT(object->map()->IsMap()); 1873 ASSERT(object->map()->IsMap());
1756 } 1874 }
1757 } 1875 }
1758 } 1876 }
1759 }; 1877 };
1760
1761
1762 // Visitor class to verify interior pointers in spaces that use region marks
1763 // to keep track of intergenerational references.
1764 // As VerifyPointersVisitor but also checks that dirty marks are set
1765 // for regions covering intergenerational references.
1766 class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
1767 public:
1768 void VisitPointers(Object** start, Object** end) {
1769 for (Object** current = start; current < end; current++) {
1770 if ((*current)->IsHeapObject()) {
1771 HeapObject* object = HeapObject::cast(*current);
1772 ASSERT(HEAP->Contains(object));
1773 ASSERT(object->map()->IsMap());
1774 if (HEAP->InNewSpace(object)) {
1775 ASSERT(HEAP->InToSpace(object));
1776 Address addr = reinterpret_cast<Address>(current);
1777 ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
1778 }
1779 }
1780 }
1781 }
1782 };
1783 #endif 1878 #endif
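
For context on how a debug visitor like VerifyPointersVisitor above is driven, here is a minimal sketch. It assumes the Heap::IterateRoots(ObjectVisitor*, VisitMode) entry point and the VISIT_ALL mode declared elsewhere in V8 (outside this hunk); it is an illustration, not part of this change.

#ifdef DEBUG
// Walk every root and assert that each reachable pointer refers to a
// well-formed heap object (see VerifyPointersVisitor above).
static void VerifyHeapPointers(Heap* heap) {
  VerifyPointersVisitor visitor;
  heap->IterateRoots(&visitor, VISIT_ALL);
}
#endif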
1784 1879
1785 1880
1786 // Space iterator for iterating over all spaces of the heap. 1881 // Space iterator for iterating over all spaces of the heap.
1787 // Returns each space in turn, and null when it is done. 1882 // Returns each space in turn, and null when it is done.
1788 class AllSpaces BASE_EMBEDDED { 1883 class AllSpaces BASE_EMBEDDED {
1789 public: 1884 public:
1790 Space* next(); 1885 Space* next();
1791 AllSpaces() { counter_ = FIRST_SPACE; } 1886 AllSpaces() { counter_ = FIRST_SPACE; }
1792 private: 1887 private:
(...skipping 312 matching lines...)
2105 2200
2106 // Sets the collector. 2201 // Sets the collector.
2107 void set_collector(GarbageCollector collector) { collector_ = collector; } 2202 void set_collector(GarbageCollector collector) { collector_ = collector; }
2108 2203
2109 // Sets the GC count. 2204 // Sets the GC count.
2110 void set_gc_count(unsigned int count) { gc_count_ = count; } 2205 void set_gc_count(unsigned int count) { gc_count_ = count; }
2111 2206
2112 // Sets the full GC count. 2207 // Sets the full GC count.
2113 void set_full_gc_count(int count) { full_gc_count_ = count; } 2208 void set_full_gc_count(int count) { full_gc_count_ = count; }
2114 2209
2115 // Sets the flag that this is a compacting full GC.
2116 void set_is_compacting() { is_compacting_ = true; }
2117 bool is_compacting() const { return is_compacting_; }
2118
2119 // Increment and decrement the count of marked objects.
2120 void increment_marked_count() { ++marked_count_; }
2121 void decrement_marked_count() { --marked_count_; }
2122
2123 int marked_count() { return marked_count_; }
2124
2125 void increment_promoted_objects_size(int object_size) { 2210 void increment_promoted_objects_size(int object_size) {
2126 promoted_objects_size_ += object_size; 2211 promoted_objects_size_ += object_size;
2127 } 2212 }
2128 2213
2129 private: 2214 private:
2130 // Returns a string matching the collector. 2215 // Returns a string matching the collector.
2131 const char* CollectorString(); 2216 const char* CollectorString();
2132 2217
2133 // Returns the size of objects in the heap (in MB). 2218 // Returns the size of objects in the heap (in MB).
2134 double SizeOfHeapObjects() { 2219 double SizeOfHeapObjects() {
2135 return (static_cast<double>(HEAP->SizeOfObjects())) / MB; 2220 return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
2136 } 2221 }
2137 2222
2138 double start_time_; // Timestamp set in the constructor. 2223 double start_time_; // Timestamp set in the constructor.
2139 intptr_t start_size_; // Size of objects in heap set in constructor. 2224 intptr_t start_size_; // Size of objects in heap set in constructor.
2140 GarbageCollector collector_; // Type of collector. 2225 GarbageCollector collector_; // Type of collector.
2141 2226
2142 // A count (including this one, e.g., the first collection is 1) of the 2227 // A count (including this one, e.g., the first collection is 1) of the
2143 // number of garbage collections. 2228 // number of garbage collections.
2144 unsigned int gc_count_; 2229 unsigned int gc_count_;
2145 2230
2146 // A count (including this one) of the number of full garbage collections. 2231 // A count (including this one) of the number of full garbage collections.
2147 int full_gc_count_; 2232 int full_gc_count_;
2148 2233
2149 // True if the current GC is a compacting full collection, false
2150 // otherwise.
2151 bool is_compacting_;
2152
2153 // True if the *previous* full GC was a compacting collection (will be
2154 // false if there has not been a previous full GC).
2155 bool previous_has_compacted_;
2156
2157 // On a full GC, a count of the number of marked objects. Incremented
2158 // when an object is marked and decremented when an object's mark bit is
2159 // cleared. Will be zero on a scavenge collection.
2160 int marked_count_;
2161
2162 // The count from the end of the previous full GC. Will be zero if there
2163 // was no previous full GC.
2164 int previous_marked_count_;
2165
2166 // Amounts of time spent in different scopes during GC. 2234 // Amounts of time spent in different scopes during GC.
2167 double scopes_[Scope::kNumberOfScopes]; 2235 double scopes_[Scope::kNumberOfScopes];
2168 2236
2169 // Total amount of space either wasted or contained in one of the free 2237 // Total amount of space either wasted or contained in one of the free
2170 // before the current GC. 2238 // before the current GC.
2171 intptr_t in_free_list_or_wasted_before_gc_; 2239 intptr_t in_free_list_or_wasted_before_gc_;
2172 2240
2173 // Difference between space used in the heap at the beginning of the current 2241 // Difference between space used in the heap at the beginning of the current
2174 // collection and the end of the previous collection. 2242 // collection and the end of the previous collection.
2175 intptr_t allocated_since_last_gc_; 2243 intptr_t allocated_since_last_gc_;
2176 2244
2177 // Amount of time spent in the mutator: the time elapsed between the end 2245 // Amount of time spent in the mutator: the time elapsed between the end
2178 // of the previous collection and the beginning of the current one. 2246 // of the previous collection and the beginning of the current one.
2179 double spent_in_mutator_; 2247 double spent_in_mutator_;
2180 2248
2181 // Size of objects promoted during the current collection. 2249 // Size of objects promoted during the current collection.
2182 intptr_t promoted_objects_size_; 2250 intptr_t promoted_objects_size_;
2183 2251
2252 // Incremental marking step counters.
2253 int steps_count_;
2254 double steps_took_;
2255 double longest_step_;
2256 int steps_count_since_last_gc_;
2257 double steps_took_since_last_gc_;
2258
2184 Heap* heap_; 2259 Heap* heap_;
2185 }; 2260 };
2186 2261
2187 2262
2188 class StringSplitCache { 2263 class StringSplitCache {
2189 public: 2264 public:
2190 static Object* Lookup(FixedArray* cache, String* string, String* pattern); 2265 static Object* Lookup(FixedArray* cache, String* string, String* pattern);
2191 static void Enter(Heap* heap, 2266 static void Enter(Heap* heap,
2192 FixedArray* cache, 2267 FixedArray* cache,
2193 String* string, 2268 String* string,
(...skipping 91 matching lines...)
2285 public: 2360 public:
2286 virtual ~WeakObjectRetainer() {} 2361 virtual ~WeakObjectRetainer() {}
2287 2362
2288 // Return whether this object should be retained. If NULL is returned, the 2363 // Return whether this object should be retained. If NULL is returned, the
2289 // object has no references. Otherwise the address of the retained object 2364 // object has no references. Otherwise the address of the retained object
2290 // should be returned, as in some GC situations the object may have been moved. 2365 // should be returned, as in some GC situations the object may have been moved.
2291 virtual Object* RetainAs(Object* object) = 0; 2366 virtual Object* RetainAs(Object* object) = 0;
2292 }; 2367 };
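
Because the RetainAs() contract above (NULL means drop, any non-NULL value is the possibly-moved replacement address) is easy to misread, here is a minimal sketch of a conforming retainer; the class name is hypothetical and not part of this change.

// Retains every object it is asked about. A real retainer would return
// NULL for objects that should be dropped, and callers must use the
// returned address rather than assume the argument pointer is still valid.
class KeepAllRetainer : public WeakObjectRetainer {
 public:
  virtual Object* RetainAs(Object* object) {
    return object;  // Keep the object alive at its current address.
  }
};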
2293 2368
2294 2369
2370 // Intrusive object marking uses the least significant bit of a
2371 // heap object's map word to mark objects.
2372 // Normally all map words have the least significant bit set
2373 // because they contain a tagged map pointer.
2374 // If the bit is not set, the object is marked.
2375 // All objects should be unmarked before resuming
2376 // JavaScript execution.
2377 class IntrusiveMarking {
2378 public:
2379 static bool IsMarked(HeapObject* object) {
2380 return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
2381 }
2382
2383 static void ClearMark(HeapObject* object) {
2384 uintptr_t map_word = object->map_word().ToRawValue();
2385 object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
2386 ASSERT(!IsMarked(object));
2387 }
2388
2389 static void SetMark(HeapObject* object) {
2390 uintptr_t map_word = object->map_word().ToRawValue();
2391 object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
2392 ASSERT(IsMarked(object));
2393 }
2394
2395 static Map* MapOfMarkedObject(HeapObject* object) {
2396 uintptr_t map_word = object->map_word().ToRawValue();
2397 return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
2398 }
2399
2400 static int SizeOfMarkedObject(HeapObject* object) {
2401 return object->SizeFromMap(MapOfMarkedObject(object));
2402 }
2403
2404 private:
2405 static const uintptr_t kNotMarkedBit = 0x1;
2406 STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
2407 };
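
The encoding above is inverted relative to what one might expect: a *cleared* low bit means marked, because tagged map pointers always carry a set low bit. Below is a self-contained model of the bit arithmetic, with plain integers standing in for real map words; it is purely illustrative.

#include <assert.h>
#include <stdint.h>

int main() {
  const uintptr_t kNotMarkedBit = 0x1;
  uintptr_t map_word = 0x1000 | kNotMarkedBit;   // Tagged map pointer: LSB set.
  assert((map_word & kNotMarkedBit) != 0);       // Fresh object: not marked.

  uintptr_t marked = map_word & ~kNotMarkedBit;  // SetMark clears the bit.
  assert((marked & kNotMarkedBit) == 0);         // IsMarked is now true.

  // MapOfMarkedObject restores the tagged pointer by re-setting the bit,
  // so the object's map (and therefore its size) remains recoverable.
  assert((marked | kNotMarkedBit) == map_word);
  return 0;
}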
2408
2409
2295 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST) 2410 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
2296 // Helper class for tracing paths to a search target Object from all roots. 2411 // Helper class for tracing paths to a search target Object from all roots.
2297 // The TracePathFrom() method can be used to trace paths from a specific 2412 // The TracePathFrom() method can be used to trace paths from a specific
2298 // object to the search target object. 2413 // object to the search target object.
2299 class PathTracer : public ObjectVisitor { 2414 class PathTracer : public ObjectVisitor {
2300 public: 2415 public:
2301 enum WhatToFind { 2416 enum WhatToFind {
2302 FIND_ALL, // Will find all matches. 2417 FIND_ALL, // Will find all matches.
2303 FIND_FIRST // Will stop the search after first match. 2418 FIND_FIRST // Will stop the search after first match.
2304 }; 2419 };
(...skipping 38 matching lines...)
2343 WhatToFind what_to_find_; 2458 WhatToFind what_to_find_;
2344 VisitMode visit_mode_; 2459 VisitMode visit_mode_;
2345 List<Object*> object_stack_; 2460 List<Object*> object_stack_;
2346 2461
2347 AssertNoAllocation no_alloc; // i.e. no gc allowed. 2462 AssertNoAllocation no_alloc; // i.e. no gc allowed.
2348 2463
2349 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2464 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2350 }; 2465 };
2351 #endif // DEBUG || LIVE_OBJECT_LIST 2466 #endif // DEBUG || LIVE_OBJECT_LIST
2352 2467
2353
2354 } } // namespace v8::internal 2468 } } // namespace v8::internal
2355 2469
2356 #undef HEAP 2470 #undef HEAP
2357 2471
2358 #endif // V8_HEAP_H_ 2472 #endif // V8_HEAP_H_