Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(143)

Side by Side Diff: src/heap/mark-compact.cc

Issue 1225733002: Reland concurrent sweeping of code space. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/mark-compact.h ('k') | src/heap/store-buffer.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/compilation-cache.h" 10 #include "src/compilation-cache.h"
(...skipping 208 matching lines...) Expand 10 before | Expand all | Expand 10 after
219 VerifyEvacuation(heap->new_space()); 219 VerifyEvacuation(heap->new_space());
220 220
221 VerifyEvacuationVisitor visitor; 221 VerifyEvacuationVisitor visitor;
222 heap->IterateStrongRoots(&visitor, VISIT_ALL); 222 heap->IterateStrongRoots(&visitor, VISIT_ALL);
223 } 223 }
224 #endif // VERIFY_HEAP 224 #endif // VERIFY_HEAP
225 225
226 226
227 void MarkCompactCollector::SetUp() { 227 void MarkCompactCollector::SetUp() {
228 free_list_old_space_.Reset(new FreeList(heap_->old_space())); 228 free_list_old_space_.Reset(new FreeList(heap_->old_space()));
229 free_list_code_space_.Reset(new FreeList(heap_->code_space()));
229 EnsureMarkingDequeIsReserved(); 230 EnsureMarkingDequeIsReserved();
230 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize); 231 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
231 } 232 }
232 233
233 234
234 void MarkCompactCollector::TearDown() { 235 void MarkCompactCollector::TearDown() {
235 AbortCompaction(); 236 AbortCompaction();
236 delete marking_deque_memory_; 237 delete marking_deque_memory_;
237 } 238 }
238 239
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
359 ClearInvalidStoreAndSlotsBufferEntries(); 360 ClearInvalidStoreAndSlotsBufferEntries();
360 361
361 #ifdef VERIFY_HEAP 362 #ifdef VERIFY_HEAP
362 if (FLAG_verify_heap) { 363 if (FLAG_verify_heap) {
363 VerifyValidStoreAndSlotsBufferEntries(heap_); 364 VerifyValidStoreAndSlotsBufferEntries(heap_);
364 } 365 }
365 #endif 366 #endif
366 367
367 SweepSpaces(); 368 SweepSpaces();
368 369
369 #ifdef VERIFY_HEAP
370 VerifyWeakEmbeddedObjectsInCode();
371 if (FLAG_omit_map_checks_for_leaf_maps) {
372 VerifyOmittedMapChecks();
373 }
374 #endif
375
376 Finish(); 370 Finish();
377 371
378 if (marking_parity_ == EVEN_MARKING_PARITY) { 372 if (marking_parity_ == EVEN_MARKING_PARITY) {
379 marking_parity_ = ODD_MARKING_PARITY; 373 marking_parity_ = ODD_MARKING_PARITY;
380 } else { 374 } else {
381 DCHECK(marking_parity_ == ODD_MARKING_PARITY); 375 DCHECK(marking_parity_ == ODD_MARKING_PARITY);
382 marking_parity_ = EVEN_MARKING_PARITY; 376 marking_parity_ = EVEN_MARKING_PARITY;
383 } 377 }
384 } 378 }
385 379
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after
492 486
493 Heap* heap_; 487 Heap* heap_;
494 PagedSpace* space_; 488 PagedSpace* space_;
495 489
496 DISALLOW_COPY_AND_ASSIGN(SweeperTask); 490 DISALLOW_COPY_AND_ASSIGN(SweeperTask);
497 }; 491 };
498 492
499 493
500 void MarkCompactCollector::StartSweeperThreads() { 494 void MarkCompactCollector::StartSweeperThreads() {
501 DCHECK(free_list_old_space_.get()->IsEmpty()); 495 DCHECK(free_list_old_space_.get()->IsEmpty());
496 DCHECK(free_list_code_space_.get()->IsEmpty());
502 V8::GetCurrentPlatform()->CallOnBackgroundThread( 497 V8::GetCurrentPlatform()->CallOnBackgroundThread(
503 new SweeperTask(heap(), heap()->old_space()), 498 new SweeperTask(heap(), heap()->old_space()),
504 v8::Platform::kShortRunningTask); 499 v8::Platform::kShortRunningTask);
500 V8::GetCurrentPlatform()->CallOnBackgroundThread(
501 new SweeperTask(heap(), heap()->code_space()),
502 v8::Platform::kShortRunningTask);
505 } 503 }
506 504
507 505
508 void MarkCompactCollector::EnsureSweepingCompleted() { 506 void MarkCompactCollector::EnsureSweepingCompleted() {
509 DCHECK(sweeping_in_progress_ == true); 507 DCHECK(sweeping_in_progress_ == true);
510 508
511 // If sweeping is not completed or not running at all, we try to complete it 509 // If sweeping is not completed or not running at all, we try to complete it
512 // here. 510 // here.
513 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { 511 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) {
514 SweepInParallel(heap()->paged_space(OLD_SPACE), 0); 512 SweepInParallel(heap()->paged_space(OLD_SPACE), 0);
513 SweepInParallel(heap()->paged_space(CODE_SPACE), 0);
515 } 514 }
516 // Wait twice for both jobs. 515 // Wait twice for both jobs.
517 if (heap()->concurrent_sweeping_enabled()) { 516 if (heap()->concurrent_sweeping_enabled()) {
518 pending_sweeper_jobs_semaphore_.Wait(); 517 pending_sweeper_jobs_semaphore_.Wait();
518 pending_sweeper_jobs_semaphore_.Wait();
519 } 519 }
520 ParallelSweepSpacesComplete(); 520 ParallelSweepSpacesComplete();
521 sweeping_in_progress_ = false; 521 sweeping_in_progress_ = false;
522 RefillFreeList(heap()->paged_space(OLD_SPACE)); 522 RefillFreeList(heap()->paged_space(OLD_SPACE));
523 RefillFreeList(heap()->paged_space(CODE_SPACE));
523 heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes(); 524 heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes();
525 heap()->paged_space(CODE_SPACE)->ResetUnsweptFreeBytes();
524 526
525 #ifdef VERIFY_HEAP 527 #ifdef VERIFY_HEAP
526 if (FLAG_verify_heap && !evacuation()) { 528 if (FLAG_verify_heap && !evacuation()) {
527 VerifyEvacuation(heap_); 529 VerifyEvacuation(heap_);
528 } 530 }
529 #endif 531 #endif
530 } 532 }
531 533
532 534
535 void MarkCompactCollector::EnsureSweepingCompleted(Page* page,
536 PagedSpace* space) {
537 if (!page->SweepingCompleted()) {
538 SweepInParallel(page, space);
539 if (!page->SweepingCompleted()) {
540 // We were not able to sweep that page, i.e., a concurrent
541 // sweeper thread currently owns this page.
 542 // TODO(hpayer): This may introduce a huge pause here. We
 543 // just care about finishing the sweep of the scan-on-scavenge page.
544 EnsureSweepingCompleted();
545 }
546 }
547 }
548
549
533 bool MarkCompactCollector::IsSweepingCompleted() { 550 bool MarkCompactCollector::IsSweepingCompleted() {
534 if (!pending_sweeper_jobs_semaphore_.WaitFor( 551 if (!pending_sweeper_jobs_semaphore_.WaitFor(
535 base::TimeDelta::FromSeconds(0))) { 552 base::TimeDelta::FromSeconds(0))) {
536 return false; 553 return false;
537 } 554 }
538 pending_sweeper_jobs_semaphore_.Signal(); 555 pending_sweeper_jobs_semaphore_.Signal();
539 return true; 556 return true;
540 } 557 }
541 558
542 559
543 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { 560 void MarkCompactCollector::RefillFreeList(PagedSpace* space) {
544 FreeList* free_list; 561 FreeList* free_list;
545 562
546 if (space == heap()->old_space()) { 563 if (space == heap()->old_space()) {
547 free_list = free_list_old_space_.get(); 564 free_list = free_list_old_space_.get();
565 } else if (space == heap()->code_space()) {
566 free_list = free_list_code_space_.get();
548 } else { 567 } else {
549 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure 568 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure
550 // to only refill them for the old space. 569 // to only refill them for the old space.
551 return; 570 return;
552 } 571 }
553 572
554 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); 573 intptr_t freed_bytes = space->free_list()->Concatenate(free_list);
555 space->AddToAccountingStats(freed_bytes); 574 space->AddToAccountingStats(freed_bytes);
556 space->DecrementUnsweptFreeBytes(freed_bytes); 575 space->DecrementUnsweptFreeBytes(freed_bytes);
557 } 576 }
(...skipping 2928 matching lines...) Expand 10 before | Expand all | Expand 10 after
3486 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, 3505 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST,
3487 space->identity() == CODE_SPACE); 3506 space->identity() == CODE_SPACE);
3488 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); 3507 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));
3489 DCHECK(parallelism == MarkCompactCollector::SWEEP_ON_MAIN_THREAD || 3508 DCHECK(parallelism == MarkCompactCollector::SWEEP_ON_MAIN_THREAD ||
3490 sweeping_mode == SWEEP_ONLY); 3509 sweeping_mode == SWEEP_ONLY);
3491 3510
3492 Address free_start = p->area_start(); 3511 Address free_start = p->area_start();
3493 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); 3512 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
3494 int offsets[16]; 3513 int offsets[16];
3495 3514
3515 // If we use the skip list for code space pages, we have to lock the skip
3516 // list because it could be accessed concurrently by the runtime or the
3517 // deoptimizer.
3496 SkipList* skip_list = p->skip_list(); 3518 SkipList* skip_list = p->skip_list();
3497 int curr_region = -1;
3498 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { 3519 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) {
3499 skip_list->Clear(); 3520 skip_list->Clear();
3500 } 3521 }
3501 3522
3502 intptr_t freed_bytes = 0; 3523 intptr_t freed_bytes = 0;
3503 intptr_t max_freed_bytes = 0; 3524 intptr_t max_freed_bytes = 0;
3525 int curr_region = -1;
3504 3526
3505 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { 3527 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
3506 Address cell_base = it.CurrentCellBase(); 3528 Address cell_base = it.CurrentCellBase();
3507 MarkBit::CellType* cell = it.CurrentCell(); 3529 MarkBit::CellType* cell = it.CurrentCell();
3508 int live_objects = MarkWordToObjectStarts(*cell, offsets); 3530 int live_objects = MarkWordToObjectStarts(*cell, offsets);
3509 int live_index = 0; 3531 int live_index = 0;
3510 for (; live_objects != 0; live_objects--) { 3532 for (; live_objects != 0; live_objects--) {
3511 Address free_end = cell_base + offsets[live_index++] * kPointerSize; 3533 Address free_end = cell_base + offsets[live_index++] * kPointerSize;
3512 if (free_end != free_start) { 3534 if (free_end != free_start) {
3513 int size = static_cast<int>(free_end - free_start); 3535 int size = static_cast<int>(free_end - free_start);
(...skipping 643 matching lines...) Expand 10 before | Expand all | Expand 10 after
4157 max_freed_overall = Max(max_freed, max_freed_overall); 4179 max_freed_overall = Max(max_freed, max_freed_overall);
4158 if (p == space->end_of_unswept_pages()) break; 4180 if (p == space->end_of_unswept_pages()) break;
4159 } 4181 }
4160 return max_freed_overall; 4182 return max_freed_overall;
4161 } 4183 }
4162 4184
4163 4185
4164 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { 4186 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) {
4165 int max_freed = 0; 4187 int max_freed = 0;
4166 if (page->TryParallelSweeping()) { 4188 if (page->TryParallelSweeping()) {
4167 FreeList* free_list = free_list_old_space_.get(); 4189 FreeList* free_list;
4168 FreeList private_free_list(space); 4190 FreeList private_free_list(space);
4169 max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, 4191 if (space->identity() == CODE_SPACE) {
4170 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); 4192 free_list = free_list_code_space_.get();
4193 max_freed =
4194 Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, REBUILD_SKIP_LIST,
4195 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
4196 } else {
4197 free_list = free_list_old_space_.get();
4198 max_freed =
4199 Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
4200 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
4201 }
4171 free_list->Concatenate(&private_free_list); 4202 free_list->Concatenate(&private_free_list);
4172 } 4203 }
4173 return max_freed; 4204 return max_freed;
4174 } 4205 }
4175 4206
4176 4207
4177 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { 4208 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
4178 space->ClearStats(); 4209 space->ClearStats();
4179 4210
4180 // We defensively initialize end_of_unswept_pages_ here with the first page 4211 // We defensively initialize end_of_unswept_pages_ here with the first page
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
4217 unused_page_present = true; 4248 unused_page_present = true;
4218 } 4249 }
4219 4250
4220 switch (sweeper) { 4251 switch (sweeper) {
4221 case CONCURRENT_SWEEPING: 4252 case CONCURRENT_SWEEPING:
4222 if (!parallel_sweeping_active) { 4253 if (!parallel_sweeping_active) {
4223 if (FLAG_gc_verbose) { 4254 if (FLAG_gc_verbose) {
4224 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", 4255 PrintF("Sweeping 0x%" V8PRIxPTR ".\n",
4225 reinterpret_cast<intptr_t>(p)); 4256 reinterpret_cast<intptr_t>(p));
4226 } 4257 }
4227 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, 4258 if (space->identity() == CODE_SPACE) {
4228 IGNORE_FREE_SPACE>(space, NULL, p, NULL); 4259 if (FLAG_zap_code_space) {
4260 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
4261 ZAP_FREE_SPACE>(space, NULL, p, NULL);
4262 } else {
4263 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
4264 IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4265 }
4266 } else {
4267 DCHECK(space->identity() == OLD_SPACE);
4268 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
4269 IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4270 }
4229 pages_swept++; 4271 pages_swept++;
4230 parallel_sweeping_active = true; 4272 parallel_sweeping_active = true;
4231 } else { 4273 } else {
4232 if (FLAG_gc_verbose) { 4274 if (FLAG_gc_verbose) {
4233 PrintF("Sweeping 0x%" V8PRIxPTR " in parallel.\n", 4275 PrintF("Sweeping 0x%" V8PRIxPTR " in parallel.\n",
4234 reinterpret_cast<intptr_t>(p)); 4276 reinterpret_cast<intptr_t>(p));
4235 } 4277 }
4236 p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING); 4278 p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING);
4237 space->IncreaseUnsweptFreeBytes(p); 4279 space->IncreaseUnsweptFreeBytes(p);
4238 } 4280 }
4239 space->set_end_of_unswept_pages(p); 4281 space->set_end_of_unswept_pages(p);
4240 break; 4282 break;
4241 case SEQUENTIAL_SWEEPING: { 4283 case SEQUENTIAL_SWEEPING: {
4242 if (FLAG_gc_verbose) { 4284 if (FLAG_gc_verbose) {
4243 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p)); 4285 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p));
4244 } 4286 }
4245 if (space->identity() == CODE_SPACE && FLAG_zap_code_space) { 4287 if (space->identity() == CODE_SPACE) {
4246 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, 4288 if (FLAG_zap_code_space) {
4247 ZAP_FREE_SPACE>(space, NULL, p, NULL); 4289 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
4248 } else if (space->identity() == CODE_SPACE) { 4290 ZAP_FREE_SPACE>(space, NULL, p, NULL);
4249 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, 4291 } else {
4250 IGNORE_FREE_SPACE>(space, NULL, p, NULL); 4292 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
4293 IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4294 }
4251 } else { 4295 } else {
4296 DCHECK(space->identity() == OLD_SPACE ||
4297 space->identity() == MAP_SPACE);
4252 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, 4298 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
4253 IGNORE_FREE_SPACE>(space, NULL, p, NULL); 4299 IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4254 } 4300 }
4255 pages_swept++; 4301 pages_swept++;
4256 break; 4302 break;
4257 } 4303 }
4258 default: { UNREACHABLE(); } 4304 default: { UNREACHABLE(); }
4259 } 4305 }
4260 } 4306 }
4261 4307
(...skipping 19 matching lines...) Expand all
4281 #endif 4327 #endif
4282 4328
4283 MoveEvacuationCandidatesToEndOfPagesList(); 4329 MoveEvacuationCandidatesToEndOfPagesList();
4284 4330
4285 // Noncompacting collections simply sweep the spaces to clear the mark 4331 // Noncompacting collections simply sweep the spaces to clear the mark
4286 // bits and free the nonlive blocks (for old and map spaces). We sweep 4332 // bits and free the nonlive blocks (for old and map spaces). We sweep
4287 // the map space last because freeing non-live maps overwrites them and 4333 // the map space last because freeing non-live maps overwrites them and
4288 // the other spaces rely on possibly non-live maps to get the sizes for 4334 // the other spaces rely on possibly non-live maps to get the sizes for
4289 // non-live objects. 4335 // non-live objects.
4290 { 4336 {
4291 GCTracer::Scope sweep_scope(heap()->tracer(), 4337 {
4292 GCTracer::Scope::MC_SWEEP_OLDSPACE); 4338 GCTracer::Scope sweep_scope(heap()->tracer(),
4293 { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); } 4339 GCTracer::Scope::MC_SWEEP_OLDSPACE);
4340 SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING);
4341 }
4342 {
4343 GCTracer::Scope sweep_scope(heap()->tracer(),
4344 GCTracer::Scope::MC_SWEEP_CODE);
4345 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING);
4346 }
4347
4294 sweeping_in_progress_ = true; 4348 sweeping_in_progress_ = true;
4295 if (heap()->concurrent_sweeping_enabled()) { 4349 if (heap()->concurrent_sweeping_enabled()) {
4296 StartSweeperThreads(); 4350 StartSweeperThreads();
4297 } 4351 }
4298 } 4352 }
4299 {
4300 GCTracer::Scope sweep_scope(heap()->tracer(),
4301 GCTracer::Scope::MC_SWEEP_CODE);
4302 SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING);
4303 }
4304 4353
4305 EvacuateNewSpaceAndCandidates(); 4354 EvacuateNewSpaceAndCandidates();
4306 4355
4307 heap()->FreeDeadArrayBuffers(false); 4356 heap()->FreeDeadArrayBuffers(false);
4308 4357
4309 // ClearNonLiveReferences depends on precise sweeping of map space to 4358 // ClearNonLiveReferences depends on precise sweeping of map space to
4310 // detect whether unmarked map became dead in this collection or in one 4359 // detect whether unmarked map became dead in this collection or in one
4311 // of the previous ones. 4360 // of the previous ones.
4312 { 4361 {
4313 GCTracer::Scope sweep_scope(heap()->tracer(), 4362 GCTracer::Scope sweep_scope(heap()->tracer(),
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
4346 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); 4395 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE);
4347 p->SetWasSwept(); 4396 p->SetWasSwept();
4348 } 4397 }
4349 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); 4398 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE);
4350 } 4399 }
4351 } 4400 }
4352 4401
4353 4402
4354 void MarkCompactCollector::ParallelSweepSpacesComplete() { 4403 void MarkCompactCollector::ParallelSweepSpacesComplete() {
4355 ParallelSweepSpaceComplete(heap()->old_space()); 4404 ParallelSweepSpaceComplete(heap()->old_space());
4405 ParallelSweepSpaceComplete(heap()->code_space());
4356 } 4406 }
4357 4407
4358 4408
4359 void MarkCompactCollector::EnableCodeFlushing(bool enable) { 4409 void MarkCompactCollector::EnableCodeFlushing(bool enable) {
4360 if (isolate()->debug()->is_loaded() || 4410 if (isolate()->debug()->is_loaded() ||
4361 isolate()->debug()->has_break_points()) { 4411 isolate()->debug()->has_break_points()) {
4362 enable = false; 4412 enable = false;
4363 } 4413 }
4364 4414
4365 if (enable) { 4415 if (enable) {
(...skipping 279 matching lines...) Expand 10 before | Expand all | Expand 10 after
4645 SlotsBuffer* buffer = *buffer_address; 4695 SlotsBuffer* buffer = *buffer_address;
4646 while (buffer != NULL) { 4696 while (buffer != NULL) {
4647 SlotsBuffer* next_buffer = buffer->next(); 4697 SlotsBuffer* next_buffer = buffer->next();
4648 DeallocateBuffer(buffer); 4698 DeallocateBuffer(buffer);
4649 buffer = next_buffer; 4699 buffer = next_buffer;
4650 } 4700 }
4651 *buffer_address = NULL; 4701 *buffer_address = NULL;
4652 } 4702 }
4653 } // namespace internal 4703 } // namespace internal
4654 } // namespace v8 4704 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/mark-compact.h ('k') | src/heap/store-buffer.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698