| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 402 matching lines...) |
| 413 #endif | 413 #endif |
| 414 } | 414 } |
| 415 | 415 |
| 416 | 416 |
| 417 void Heap::CollectAllGarbage(bool force_compaction, | 417 void Heap::CollectAllGarbage(bool force_compaction, |
| 418 CollectionPolicy collectionPolicy) { | 418 CollectionPolicy collectionPolicy) { |
| 419 // Since we are ignoring the return value, the exact choice of space does | 419 // Since we are ignoring the return value, the exact choice of space does |
| 420 // not matter, so long as we do not specify NEW_SPACE, which would not | 420 // not matter, so long as we do not specify NEW_SPACE, which would not |
| 421 // cause a full GC. | 421 // cause a full GC. |
| 422 MarkCompactCollector::SetForceCompaction(force_compaction); | 422 MarkCompactCollector::SetForceCompaction(force_compaction); |
| 423 CollectGarbage(0, OLD_POINTER_SPACE, collectionPolicy); | 423 CollectGarbage(OLD_POINTER_SPACE, collectionPolicy); |
| 424 MarkCompactCollector::SetForceCompaction(false); | 424 MarkCompactCollector::SetForceCompaction(false); |
| 425 } | 425 } |
| 426 | 426 |
| 427 | 427 |
| 428 void Heap::CollectAllAvailableGarbage() { | 428 void Heap::CollectAllAvailableGarbage() { |
| 429 CompilationCache::Clear(); | 429 CompilationCache::Clear(); |
| 430 CollectAllGarbage(true, AGGRESSIVE); | 430 CollectAllGarbage(true, AGGRESSIVE); |
| 431 } | 431 } |
| 432 | 432 |
| 433 | 433 |
| 434 bool Heap::CollectGarbage(int requested_size, | 434 void Heap::CollectGarbage(AllocationSpace space, |
| 435 AllocationSpace space, | |
| 436 CollectionPolicy collectionPolicy) { | 435 CollectionPolicy collectionPolicy) { |
| 437 // The VM is in the GC state until exiting this function. | 436 // The VM is in the GC state until exiting this function. |
| 438 VMState state(GC); | 437 VMState state(GC); |
| 439 | 438 |
| 440 #ifdef DEBUG | 439 #ifdef DEBUG |
| 441 // Reset the allocation timeout to the GC interval, but make sure to | 440 // Reset the allocation timeout to the GC interval, but make sure to |
| 442 // allow at least a few allocations after a collection. The reason | 441 // allow at least a few allocations after a collection. The reason |
| 443 // for this is that we have a lot of allocation sequences and we | 442 // for this is that we have a lot of allocation sequences and we |
| 444 // assume that a garbage collection will allow the subsequent | 443 // assume that a garbage collection will allow the subsequent |
| 445 // allocation attempts to go through. | 444 // allocation attempts to go through. |
| (...skipping 17 matching lines...) |
| 463 PerformGarbageCollection(collector, &tracer, collectionPolicy); | 462 PerformGarbageCollection(collector, &tracer, collectionPolicy); |
| 464 rate->Stop(); | 463 rate->Stop(); |
| 465 | 464 |
| 466 GarbageCollectionEpilogue(); | 465 GarbageCollectionEpilogue(); |
| 467 } | 466 } |
| 468 | 467 |
| 469 | 468 |
| 470 #ifdef ENABLE_LOGGING_AND_PROFILING | 469 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 471 if (FLAG_log_gc) HeapProfiler::WriteSample(); | 470 if (FLAG_log_gc) HeapProfiler::WriteSample(); |
| 472 #endif | 471 #endif |
| 473 | |
| 474 switch (space) { | |
| 475 case NEW_SPACE: | |
| 476 return new_space_.Available() >= requested_size; | |
| 477 case OLD_POINTER_SPACE: | |
| 478 return old_pointer_space_->Available() >= requested_size; | |
| 479 case OLD_DATA_SPACE: | |
| 480 return old_data_space_->Available() >= requested_size; | |
| 481 case CODE_SPACE: | |
| 482 return code_space_->Available() >= requested_size; | |
| 483 case MAP_SPACE: | |
| 484 return map_space_->Available() >= requested_size; | |
| 485 case CELL_SPACE: | |
| 486 return cell_space_->Available() >= requested_size; | |
| 487 case LO_SPACE: | |
| 488 return lo_space_->Available() >= requested_size; | |
| 489 } | |
| 490 return false; | |
| 491 } | 472 } |
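With the bool return and the `requested_size` parameter gone, a caller that used to test the result of `CollectGarbage(size, space)` now collects first and queries the space afterwards. A self-contained sketch of that migration, using hypothetical stand-ins for the space object and the new void API:

```cpp
#include <cstdio>

// Hypothetical stand-in for a V8 paged space.
struct Space {
  int available;
  int Available() const { return available; }
};

// New-style API: performs the collection and reports nothing.
void CollectGarbage(Space* space) {
  space->available += 4096;  // Model a GC that frees one page.
}

int main() {
  Space old_pointer_space = { 0 };
  const int kRequestedSize = 2048;

  // Old style (removed): bool fits = CollectGarbage(kRequestedSize, space);
  // New style: collect, then ask the space for its availability directly.
  CollectGarbage(&old_pointer_space);
  bool fits = old_pointer_space.Available() >= kRequestedSize;
  std::printf("request fits after GC: %s\n", fits ? "yes" : "no");
  return 0;
}
```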
| 492 | 473 |
| 493 | 474 |
| 494 void Heap::PerformScavenge() { | 475 void Heap::PerformScavenge() { |
| 495 GCTracer tracer; | 476 GCTracer tracer; |
| 496 PerformGarbageCollection(SCAVENGER, &tracer, NORMAL); | 477 PerformGarbageCollection(SCAVENGER, &tracer, NORMAL); |
| 497 } | 478 } |
| 498 | 479 |
| 499 | 480 |
| 500 #ifdef DEBUG | 481 #ifdef DEBUG |
| (...skipping 34 matching lines...) |
| 535 PagedSpace* old_pointer_space = Heap::old_pointer_space(); | 516 PagedSpace* old_pointer_space = Heap::old_pointer_space(); |
| 536 PagedSpace* old_data_space = Heap::old_data_space(); | 517 PagedSpace* old_data_space = Heap::old_data_space(); |
| 537 PagedSpace* code_space = Heap::code_space(); | 518 PagedSpace* code_space = Heap::code_space(); |
| 538 PagedSpace* map_space = Heap::map_space(); | 519 PagedSpace* map_space = Heap::map_space(); |
| 539 PagedSpace* cell_space = Heap::cell_space(); | 520 PagedSpace* cell_space = Heap::cell_space(); |
| 540 LargeObjectSpace* lo_space = Heap::lo_space(); | 521 LargeObjectSpace* lo_space = Heap::lo_space(); |
| 541 bool gc_performed = true; | 522 bool gc_performed = true; |
| 542 while (gc_performed) { | 523 while (gc_performed) { |
| 543 gc_performed = false; | 524 gc_performed = false; |
| 544 if (!new_space->ReserveSpace(new_space_size)) { | 525 if (!new_space->ReserveSpace(new_space_size)) { |
| 545 Heap::CollectGarbage(new_space_size, NEW_SPACE); | 526 Heap::CollectGarbage(NEW_SPACE); |
| 546 gc_performed = true; | 527 gc_performed = true; |
| 547 } | 528 } |
| 548 if (!old_pointer_space->ReserveSpace(pointer_space_size)) { | 529 if (!old_pointer_space->ReserveSpace(pointer_space_size)) { |
| 549 Heap::CollectGarbage(pointer_space_size, OLD_POINTER_SPACE); | 530 Heap::CollectGarbage(OLD_POINTER_SPACE); |
| 550 gc_performed = true; | 531 gc_performed = true; |
| 551 } | 532 } |
| 552 if (!(old_data_space->ReserveSpace(data_space_size))) { | 533 if (!(old_data_space->ReserveSpace(data_space_size))) { |
| 553 Heap::CollectGarbage(data_space_size, OLD_DATA_SPACE); | 534 Heap::CollectGarbage(OLD_DATA_SPACE); |
| 554 gc_performed = true; | 535 gc_performed = true; |
| 555 } | 536 } |
| 556 if (!(code_space->ReserveSpace(code_space_size))) { | 537 if (!(code_space->ReserveSpace(code_space_size))) { |
| 557 Heap::CollectGarbage(code_space_size, CODE_SPACE); | 538 Heap::CollectGarbage(CODE_SPACE); |
| 558 gc_performed = true; | 539 gc_performed = true; |
| 559 } | 540 } |
| 560 if (!(map_space->ReserveSpace(map_space_size))) { | 541 if (!(map_space->ReserveSpace(map_space_size))) { |
| 561 Heap::CollectGarbage(map_space_size, MAP_SPACE); | 542 Heap::CollectGarbage(MAP_SPACE); |
| 562 gc_performed = true; | 543 gc_performed = true; |
| 563 } | 544 } |
| 564 if (!(cell_space->ReserveSpace(cell_space_size))) { | 545 if (!(cell_space->ReserveSpace(cell_space_size))) { |
| 565 Heap::CollectGarbage(cell_space_size, CELL_SPACE); | 546 Heap::CollectGarbage(CELL_SPACE); |
| 566 gc_performed = true; | 547 gc_performed = true; |
| 567 } | 548 } |
| 568 // We add a slack-factor of 2 in order to have space for a series of | 549 // We add a slack-factor of 2 in order to have space for a series of |
| 569 // large-object allocations that are only just larger than the page size. | 550 // large-object allocations that are only just larger than the page size. |
| 570 large_object_size *= 2; | 551 large_object_size *= 2; |
| 571 // The ReserveSpace method on the large object space checks how much | 552 // The ReserveSpace method on the large object space checks how much |
| 572 // we can expand the old generation. This includes expansion caused by | 553 // we can expand the old generation. This includes expansion caused by |
| 573 // allocation in the other spaces. | 554 // allocation in the other spaces. |
| 574 large_object_size += cell_space_size + map_space_size + code_space_size + | 555 large_object_size += cell_space_size + map_space_size + code_space_size + |
| 575 data_space_size + pointer_space_size; | 556 data_space_size + pointer_space_size; |
| 576 if (!(lo_space->ReserveSpace(large_object_size))) { | 557 if (!(lo_space->ReserveSpace(large_object_size))) { |
| 577 Heap::CollectGarbage(large_object_size, LO_SPACE); | 558 Heap::CollectGarbage(LO_SPACE); |
| 578 gc_performed = true; | 559 gc_performed = true; |
| 579 } | 560 } |
| 580 } | 561 } |
| 581 } | 562 } |
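The loop in this hunk is a reserve-then-collect retry: every space must pass its reservation in a single clean pass, otherwise the failing space is collected and the whole round restarts, so all reservations are known to hold simultaneously. A compact sketch of the pattern, with Space and CollectGarbage as simplified stand-ins:

```cpp
#include <vector>

// Simplified stand-in for a V8 space; here a reservation is just an
// availability check rather than a real commitment.
struct Space {
  int free_bytes;
  bool ReserveSpace(int bytes) const { return free_bytes >= bytes; }
};

void CollectGarbage(Space* space) {
  space->free_bytes += 8192;  // Model a GC that frees some memory.
}

// Retry until every reservation succeeds in one pass. The sketch
// terminates because each collection frees additional memory.
void ReserveAll(std::vector<Space>& spaces, int bytes_each) {
  bool gc_performed = true;
  while (gc_performed) {
    gc_performed = false;
    for (Space& space : spaces) {
      if (!space.ReserveSpace(bytes_each)) {
        CollectGarbage(&space);
        gc_performed = true;
      }
    }
  }
}
```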
| 582 | 563 |
| 583 | 564 |
| 584 void Heap::EnsureFromSpaceIsCommitted() { | 565 void Heap::EnsureFromSpaceIsCommitted() { |
| 585 if (new_space_.CommitFromSpaceIfNeeded()) return; | 566 if (new_space_.CommitFromSpaceIfNeeded()) return; |
| 586 | 567 |
| 587 // Committing memory to from space failed. | 568 // Committing memory to from space failed. |
| (...skipping 2836 matching lines...) |
| 3424 } else { | 3405 } else { |
| 3425 number_idle_notifications = 0; | 3406 number_idle_notifications = 0; |
| 3426 last_gc_count = gc_count_; | 3407 last_gc_count = gc_count_; |
| 3427 } | 3408 } |
| 3428 | 3409 |
| 3429 if (number_idle_notifications == kIdlesBeforeScavenge) { | 3410 if (number_idle_notifications == kIdlesBeforeScavenge) { |
| 3430 if (contexts_disposed_ > 0) { | 3411 if (contexts_disposed_ > 0) { |
| 3431 HistogramTimerScope scope(&Counters::gc_context); | 3412 HistogramTimerScope scope(&Counters::gc_context); |
| 3432 CollectAllGarbage(false); | 3413 CollectAllGarbage(false); |
| 3433 } else { | 3414 } else { |
| 3434 CollectGarbage(0, NEW_SPACE); | 3415 CollectGarbage(NEW_SPACE); |
| 3435 } | 3416 } |
| 3436 new_space_.Shrink(); | 3417 new_space_.Shrink(); |
| 3437 last_gc_count = gc_count_; | 3418 last_gc_count = gc_count_; |
| 3438 | 3419 |
| 3439 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) { | 3420 } else if (number_idle_notifications == kIdlesBeforeMarkSweep) { |
| 3440 // Before doing the mark-sweep collections we clear the | 3421 // Before doing the mark-sweep collections we clear the |
| 3441 // compilation cache to avoid hanging on to source code and | 3422 // compilation cache to avoid hanging on to source code and |
| 3442 // generated code for cached functions. | 3423 // generated code for cached functions. |
| 3443 CompilationCache::Clear(); | 3424 CompilationCache::Clear(); |
| 3444 | 3425 |
| (...skipping 1485 matching lines...) |
| 4930 } | 4911 } |
| 4931 | 4912 |
| 4932 | 4913 |
| 4933 DescriptorLookupCache::Key | 4914 DescriptorLookupCache::Key |
| 4934 DescriptorLookupCache::keys_[DescriptorLookupCache::kLength]; | 4915 DescriptorLookupCache::keys_[DescriptorLookupCache::kLength]; |
| 4935 | 4916 |
| 4936 int DescriptorLookupCache::results_[DescriptorLookupCache::kLength]; | 4917 int DescriptorLookupCache::results_[DescriptorLookupCache::kLength]; |
| 4937 | 4918 |
| 4938 | 4919 |
| 4939 #ifdef DEBUG | 4920 #ifdef DEBUG |
| 4940 bool Heap::GarbageCollectionGreedyCheck() { | 4921 void Heap::GarbageCollectionGreedyCheck() { |
| 4941 ASSERT(FLAG_gc_greedy); | 4922 ASSERT(FLAG_gc_greedy); |
| 4942 if (Bootstrapper::IsActive()) return true; | 4923 if (Bootstrapper::IsActive()) return; |
| 4943 if (disallow_allocation_failure()) return true; | 4924 if (disallow_allocation_failure()) return; |
| 4944 return CollectGarbage(0, NEW_SPACE); | 4925 CollectGarbage(NEW_SPACE); |
| 4945 } | 4926 } |
| 4946 #endif | 4927 #endif |
| 4947 | 4928 |
| 4948 | 4929 |
| 4949 TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t) | 4930 TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t) |
| 4950 : type_(t) { | 4931 : type_(t) { |
| 4951 uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't | 4932 uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't |
| 4952 uint32_t in1 = 0xffffffffu; // generated by the FPU. | 4933 uint32_t in1 = 0xffffffffu; // generated by the FPU. |
| 4953 for (int i = 0; i < kCacheSize; i++) { | 4934 for (int i = 0; i < kCacheSize; i++) { |
| 4954 elements_[i].in[0] = in0; | 4935 elements_[i].in[0] = in0; |
| (...skipping 41 matching lines...) |
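On the sentinel in the hunk above: with both input words set to 0xffffffff, the cache key is a double whose exponent field is all ones and whose mantissa is non-zero, i.e. a NaN, and not the quiet-NaN pattern the FPU produces for ordinary operations. A quick standalone check, assuming IEEE-754 doubles:

```cpp
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // Both 32-bit halves of the cache key are 0xffffffff.
  uint64_t bits = 0xffffffffffffffffull;
  double d;
  std::memcpy(&d, &bits, sizeof d);  // Reinterpret the bit pattern.
  std::printf("0xffffffff:ffffffff is NaN: %s\n",
              std::isnan(d) ? "yes" : "no");  // Prints "yes".
  return 0;
}
```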
| 4996 void ExternalStringTable::TearDown() { | 4977 void ExternalStringTable::TearDown() { |
| 4997 new_space_strings_.Free(); | 4978 new_space_strings_.Free(); |
| 4998 old_space_strings_.Free(); | 4979 old_space_strings_.Free(); |
| 4999 } | 4980 } |
| 5000 | 4981 |
| 5001 | 4982 |
| 5002 List<Object*> ExternalStringTable::new_space_strings_; | 4983 List<Object*> ExternalStringTable::new_space_strings_; |
| 5003 List<Object*> ExternalStringTable::old_space_strings_; | 4984 List<Object*> ExternalStringTable::old_space_strings_; |
| 5004 | 4985 |
| 5005 } } // namespace v8::internal | 4986 } } // namespace v8::internal |