Chromium Code Reviews

Diff: src/mark-compact.h

Issue 7189066: Simple non-incremental compaction by evacuation. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 6 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 238 matching lines...)
  // (mod mask + 1).
  int top_;
  int bottom_;
  int mask_;
  bool overflowed_;

  DISALLOW_COPY_AND_ASSIGN(MarkingDeque);
};


+class SlotsBuffer {
+ public:
+  typedef Object** ObjectSlot;
+
+  SlotsBuffer();
+  ~SlotsBuffer();
+
+  void Clear();
+  void Add(ObjectSlot slot);
+  void Iterate(ObjectVisitor* visitor);
+  void Report();
+ private:

Erik Corry 2011/06/20 20:41:26: missing blank line before private:
Vyacheslav Egorov (Chromium) 2011/06/21 11:44:48: Done.

+  static const int kBufferSize = 1024;
+
+  List<ObjectSlot*> buffers_;
+  ObjectSlot* buffer_;
+
+  int idx_;
+  int buffer_idx_;
+};
+
+
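The fields above suggest a buffer of slot addresses that grows in fixed-size chunks of kBufferSize entries (a list of filled chunks plus a current chunk and two indices). The standalone sketch below illustrates that reading only; the class name, the std::vector backing store, and the Visitor parameter are invented for illustration and are not part of this patch.

#include <vector>

// Toy chunked buffer of slot addresses, showing one plausible reading of
// SlotsBuffer's fields; not the patch's implementation.
class ToySlotsBuffer {
 public:
  typedef void** ObjectSlot;

  ~ToySlotsBuffer() {
    for (size_t i = 0; i < chunks_.size(); i++) delete[] chunks_[i];
  }

  // Append a slot, starting a new kChunkSize-entry chunk when the current
  // one is full (or when no chunk has been allocated yet).
  void Add(ObjectSlot slot) {
    if (chunks_.empty() || index_ == kChunkSize) {
      chunks_.push_back(new ObjectSlot[kChunkSize]);
      index_ = 0;
    }
    chunks_.back()[index_++] = slot;
  }

  // Visit every recorded slot, e.g. to update pointers after objects moved.
  template <typename Visitor>
  void Iterate(Visitor* visitor) {
    for (size_t c = 0; c < chunks_.size(); c++) {
      size_t limit = (c + 1 == chunks_.size()) ? index_ : kChunkSize;
      for (size_t i = 0; i < limit; i++) visitor->VisitSlot(chunks_[c][i]);
    }
  }

 private:
  static const size_t kChunkSize = 1024;  // analogous to kBufferSize above
  std::vector<ObjectSlot*> chunks_;       // filled chunks (cf. buffers_)
  size_t index_ = 0;                      // position in the current chunk (cf. idx_)
};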
// -------------------------------------------------------------------------
// Mark-Compact collector
class MarkCompactCollector {
 public:
  // Type of functions to compute forwarding addresses of objects in
  // compacted spaces. Given an object and its size, return a (non-failure)
  // Object* that will be the object after forwarding. There is a separate
  // allocation function for each (compactable) space based on the location
  // of the object before compaction.
  typedef MaybeObject* (*AllocationFunction)(Heap* heap,
(...skipping 23 matching lines...)
  // Set the global force_compaction flag, it must be called before Prepare
  // to take effect.
  inline void SetFlags(int flags);

  inline bool PreciseSweepingRequired() {
    return sweep_precisely_;
  }

  static void Initialize();

+  void CollectEvacuationCandidates(PagedSpace* space);
+
+  void AddEvacuationCandidate(Page* p);
+
  // Prepares for GC by resetting relocation info in old and map spaces and
  // choosing spaces to compact.
  void Prepare(GCTracer* tracer);

  // Performs a global garbage collection.
  void CollectGarbage();

-  // True if the last full GC performed heap compaction.
-  bool HasCompacted() { return compacting_collection_; }
-
-  // True after the Prepare phase if the compaction is taking place.
-  bool IsCompacting() {
-#ifdef DEBUG
-    // For the purposes of asserts we don't want this to keep returning true
-    // after the collection is completed.
-    return state_ != IDLE && compacting_collection_;
-#else
-    return compacting_collection_;
-#endif
-  }
-
  // During a full GC, there is a stack-allocated GCTracer that is used for
  // bookkeeping information. Return a pointer to that tracer.
  GCTracer* tracer() { return tracer_; }

#ifdef DEBUG
  // Checks whether performing mark-compact collection.
  bool in_use() { return state_ > PREPARE_GC; }
  bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif

  // Determine type of object and emit deletion log event.
  static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate);

  // Distinguishable invalid map encodings (for single word and multiple words)
  // that indicate free regions.
  static const uint32_t kSingleFreeEncoding = 0;
  static const uint32_t kMultiFreeEncoding = 1;

-  inline bool IsMarked(Object* obj);
+  static inline bool IsMarked(Object* obj);

  inline Heap* heap() const { return heap_; }

  CodeFlusher* code_flusher() { return code_flusher_; }
  inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
  void EnableCodeFlushing(bool enable);

  enum SweeperType {
    CONSERVATIVE,
    LAZY_CONSERVATIVE,
    PRECISE
  };

  // Sweep a single page from the given space conservatively.
  // Return a number of reclaimed bytes.
  static int SweepConservatively(PagedSpace* space, Page* p);

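SweepConservatively is only declared here, so as a rough intuition the sketch below models a conservative sweep under the common reading that it frees contiguous runs of unmarked memory on a page and returns the number of reclaimed bytes, without keeping the page precisely iterable. Everything in it (the word-granularity mark bitmap, the FreeRegion type) is invented for illustration and is not this patch's code.

#include <cstddef>
#include <vector>

const size_t kWordSize = sizeof(void*);

struct FreeRegion { size_t offset; size_t size_in_bytes; };

// marked[i] is true if the i-th word of the page belongs to a live (marked)
// object. A conservative sweep hands every maximal run of dead words to the
// free list and reports how many bytes were reclaimed.
size_t SweepPageConservatively(const std::vector<bool>& marked,
                               std::vector<FreeRegion>* free_list) {
  size_t reclaimed = 0;
  size_t run_start = 0;
  bool in_run = false;
  for (size_t i = 0; i < marked.size(); i++) {
    if (!marked[i]) {
      if (!in_run) { run_start = i; in_run = true; }
    } else if (in_run) {
      size_t bytes = (i - run_start) * kWordSize;
      free_list->push_back({run_start * kWordSize, bytes});
      reclaimed += bytes;
      in_run = false;
    }
  }
  if (in_run) {
    size_t bytes = (marked.size() - run_start) * kWordSize;
    free_list->push_back({run_start * kWordSize, bytes});
    reclaimed += bytes;
  }
  return reclaimed;  // mirrors "return a number of reclaimed bytes"
}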
+  INLINE(static bool IsOnEvacuationCandidateOrInNewSpace(Address anchor)) {
+    return Page::FromAddress(anchor)->IsEvacuationCandidateOrNewSpace();
+  }
+
+  INLINE(static bool IsOnEvacuationCandidate(Object* obj)) {
+    return Page::FromAddress(reinterpret_cast<Address>(obj))->
+        IsEvacuationCandidate();
+  }
+
+  INLINE(void RecordSlot(Address anchor, Object** slot, Object* object)) {
+    if (IsOnEvacuationCandidate(object) &&
+        !IsOnEvacuationCandidateOrInNewSpace(anchor)) {
+      slots_buffer_.Add(slot);
+    }
+  }
+
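RecordSlot above filters which slots get remembered: only a slot that points into an evacuation candidate page but is itself held outside the candidates and new space is added to the buffer, presumably because slots on candidate pages move with their holder and new-space slots are revisited when new space is evacuated. A small self-contained illustration of that filter, with toy types invented for this example:

#include <cassert>

struct ToyPage {
  bool is_evacuation_candidate;
  bool is_new_space;
};

// Mirrors the condition in RecordSlot: record only when the target will move
// (it is on an evacuation candidate) and the holder's slot will not be
// rewritten by some other mechanism.
inline bool ShouldRecordSlot(const ToyPage& holder_page,
                             const ToyPage& target_page) {
  return target_page.is_evacuation_candidate &&
         !(holder_page.is_evacuation_candidate || holder_page.is_new_space);
}

int main() {
  ToyPage old_page = {false, false};
  ToyPage candidate = {true, false};
  ToyPage new_space = {false, true};
  assert(ShouldRecordSlot(old_page, candidate));    // old page -> candidate: record
  assert(!ShouldRecordSlot(candidate, candidate));  // holder moves anyway
  assert(!ShouldRecordSlot(new_space, candidate));  // new space handled separately
  assert(!ShouldRecordSlot(old_page, old_page));    // target does not move
  return 0;
}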
+  void MigrateObject(Address dst,
+                     Address src,
+                     int size,
+                     AllocationSpace to_old_space);
+
+  bool TryPromoteObject(HeapObject* object, int object_size);
+
 private:
  MarkCompactCollector();
  ~MarkCompactCollector();

#ifdef DEBUG
  enum CollectorState {
    IDLE,
    PREPARE_GC,
    MARK_LIVE_OBJECTS,
    SWEEP_SPACES,
    ENCODE_FORWARDING_ADDRESSES,
    UPDATE_POINTERS,
    RELOCATE_OBJECTS
  };

  // The current stage of the collector.
  CollectorState state_;
#endif

-  // Global flag that forces a compaction.
-  bool force_compaction_;
-
  // Global flag that forces sweeping to be precise, so we can traverse the
  // heap.
  bool sweep_precisely_;

-  // Global flag indicating whether spaces were compacted on the last GC.
-  bool compacting_collection_;
-
-  // Global flag indicating whether spaces will be compacted on the next GC.
-  bool compact_on_next_gc_;
-
-  // The number of objects left marked at the end of the last completed full
-  // GC (expected to be zero).
-  int previous_marked_count_;
-
  // A pointer to the current stack-allocated GC tracer object during a full
  // collection (NULL before and after).
  GCTracer* tracer_;

+  SlotsBuffer slots_buffer_;
+
  // Finishes GC, performs heap verification if enabled.
  void Finish();

  // -----------------------------------------------------------------------
  // Phase 1: Marking live objects.
  //
  //  Before: The heap has been prepared for garbage collection by
  //          MarkCompactCollector::Prepare() and is otherwise in its
  //          normal state.
  //
(...skipping 65 matching lines...)
  void RefillMarkingDeque();

  // Callback function for telling whether the object *p is an unmarked
  // heap object.
  static bool IsUnmarkedHeapObject(Object** p);

#ifdef DEBUG
  void UpdateLiveObjectCount(HeapObject* obj);
#endif

-  // Test whether a (possibly marked) object is a Map.
-  static inline bool SafeIsMap(HeapObject* object);
-
  // Map transitions from a live map to a dead map must be killed.
  // We replace them with a null descriptor, with the same key.
  void ClearNonLiveTransitions();

  // -----------------------------------------------------------------------
  // Phase 2: Sweeping to clear mark bits and free non-live objects for
  // a non-compacting collection.
  //
  //  Before: Live objects are marked and non-live objects are unmarked.
  //
(...skipping 12 matching lines...)
  // Iterates the live objects between a range of addresses, returning the
  // number of live objects.
  int IterateLiveObjectsInRange(Address start, Address end,
                                LiveObjectCallback size_func);

  // If we are not compacting the heap, we simply sweep the spaces except
  // for the large object space, clearing mark bits and adding unmarked
  // regions to each space's free list.
  void SweepSpaces();

-  void SweepNewSpace(NewSpace* space);
+  void EvacuateNewSpace();

+  void EvacuateLiveObjectsFromPage(Page* p);
+
+  void EvacuatePages();
+
+  void EvacuateNewSpaceAndCandidates();

  void SweepSpace(PagedSpace* space, SweeperType sweeper);


#ifdef DEBUG
  // -----------------------------------------------------------------------
  // Debugging variables, functions and classes
  // Counters used for debugging the marking phase of mark-compact or
  // mark-sweep collection.

(...skipping 25 matching lines...)
  static void VisitObject(HeapObject* obj);

  friend class UnmarkObjectVisitor;
  static void UnmarkObject(HeapObject* obj);
#endif

  Heap* heap_;
  MarkingDeque marking_deque_;
  CodeFlusher* code_flusher_;

+  List<Page*> evacuation_candidates_;
+
  friend class Heap;
};


} }  // namespace v8::internal

#endif  // V8_MARK_COMPACT_H_