Chromium Code Reviews

Unified Diff: src/store-buffer.cc

Issue 9017009: Reduce signal sender thread stack size to 32k. Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided

(...skipping 507 matching lines...)
 // to these maps. The special garbage section (see comment in spaces.h) is
 // skipped since it can contain absolutely anything. Any objects that are
 // allocated during iteration may or may not be visited by the iteration, but
 // they will not be partially visited.
 void StoreBuffer::FindPointersToNewSpaceOnPage(
     PagedSpace* space,
     Page* page,
     RegionCallback region_callback,
     ObjectSlotCallback slot_callback) {
   Address visitable_start = page->ObjectAreaStart();
-  Address end_of_page = page->ObjectAreaEnd();
 
   Address visitable_end = visitable_start;
 
   Object* free_space_map = heap_->free_space_map();
   Object* two_pointer_filler_map = heap_->two_pointer_filler_map();
 
-  while (visitable_end < end_of_page) {
-    Object* o = *reinterpret_cast<Object**>(visitable_end);
-    // Skip fillers but not things that look like fillers in the special
-    // garbage section which can contain anything.
-    if (o == free_space_map ||
-        o == two_pointer_filler_map ||
-        (visitable_end == space->top() && visitable_end != space->limit())) {
-      if (visitable_start != visitable_end) {
-        // After calling this the special garbage section may have moved.
-        (this->*region_callback)(visitable_start,
-                                 visitable_end,
-                                 slot_callback);
-        if (visitable_end >= space->top() && visitable_end < space->limit()) {
-          visitable_end = space->limit();
-          visitable_start = visitable_end;
-          continue;
-        }
-      }
-      if (visitable_end == space->top() && visitable_end != space->limit()) {
-        visitable_start = visitable_end = space->limit();
-      } else {
-        // At this point we are either at the start of a filler or we are at
-        // the point where the space->top() used to be before the
-        // visit_pointer_region call above. Either way we can skip the
-        // object at the current spot: We don't promise to visit objects
-        // allocated during heap traversal, and if space->top() moved then it
-        // must be because an object was allocated at this point.
-        visitable_start =
-            visitable_end + HeapObject::FromAddress(visitable_end)->Size();
-        visitable_end = visitable_start;
-      }
-    } else {
-      ASSERT(o != free_space_map);
-      ASSERT(o != two_pointer_filler_map);
-      ASSERT(visitable_end < space->top() || visitable_end >= space->limit());
-      visitable_end += kPointerSize;
-    }
-  }
-  ASSERT(visitable_end == end_of_page);
+  while (true) {  // While the page grows (doesn't normally happen).
+    Address end_of_page = page->ObjectAreaEnd();
+    while (visitable_end < end_of_page) {
+      Object* o = *reinterpret_cast<Object**>(visitable_end);
+      // Skip fillers but not things that look like fillers in the special
+      // garbage section which can contain anything.
+      if (o == free_space_map ||
+          o == two_pointer_filler_map ||
+          (visitable_end == space->top() && visitable_end != space->limit())) {
+        if (visitable_start != visitable_end) {
+          // After calling this the special garbage section may have moved.
+          (this->*region_callback)(visitable_start,
+                                   visitable_end,
+                                   slot_callback);
+          if (visitable_end >= space->top() && visitable_end < space->limit()) {
+            visitable_end = space->limit();
+            visitable_start = visitable_end;
+            continue;
+          }
+        }
+        if (visitable_end == space->top() && visitable_end != space->limit()) {
+          visitable_start = visitable_end = space->limit();
+        } else {
+          // At this point we are either at the start of a filler or we are at
+          // the point where the space->top() used to be before the
+          // visit_pointer_region call above. Either way we can skip the
+          // object at the current spot: We don't promise to visit objects
+          // allocated during heap traversal, and if space->top() moved then it
+          // must be because an object was allocated at this point.
+          visitable_start =
+              visitable_end + HeapObject::FromAddress(visitable_end)->Size();
+          visitable_end = visitable_start;
+        }
+      } else {
+        ASSERT(o != free_space_map);
+        ASSERT(o != two_pointer_filler_map);
+        ASSERT(visitable_end < space->top() || visitable_end >= space->limit());
+        visitable_end += kPointerSize;
+      }
+    }
+    ASSERT(visitable_end >= end_of_page);
+    // If the page did not grow we are done.
+    if (end_of_page == page->ObjectAreaEnd()) break;
+  }
+  ASSERT(visitable_end == page->ObjectAreaEnd());
   if (visitable_start != visitable_end) {
     (this->*region_callback)(visitable_start,
                              visitable_end,
                              slot_callback);
   }
 }
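The restructuring above exists to cope with a page that grows while it is being scanned: the old code snapshotted end_of_page once before the loop, while the new code wraps the scan in while (true), re-reads page->ObjectAreaEnd() at the top of each pass, and only stops once a pass completes without the page having grown. The per-pass assertion is correspondingly relaxed from == to >=, since stepping visitable_end forward by an object's Size() can carry it past the snapshotted end. Below is a minimal standalone sketch of this snapshot-and-rescan pattern; all names are illustrative stand-ins, not V8 API.

#include <cstddef>
#include <vector>

// A region whose visit hook can extend the region itself, standing in
// for a page that grows while its pointers are being visited.
struct GrowableRegion {
  std::vector<int> cells;
  bool grow_once = true;                        // Grow exactly once, mid-scan.
  size_t end() const { return cells.size(); }   // Analogue of ObjectAreaEnd().
  void Visit(size_t i) {
    if (grow_once && i == 4) {
      cells.resize(cells.size() + 4, 0);        // The "page" grows.
      grow_once = false;
    }
  }
};

size_t ScanToStableEnd(GrowableRegion* region) {
  size_t cursor = 0;
  while (true) {                      // While the region grows.
    size_t end = region->end();       // Snapshot the bound for this pass.
    while (cursor < end) {
      region->Visit(cursor);          // May extend the region...
      ++cursor;                       // ...but this pass keeps its snapshot.
    }
    if (end == region->end()) break;  // No growth during the pass: done.
  }
  return cursor;                      // cursor == region->end() here.
}

int main() {
  GrowableRegion region{std::vector<int>(8, 0)};
  return ScanToStableEnd(&region) == 12 ? 0 : 1;  // 8 initial + 4 grown cells.
}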
 
 
 void StoreBuffer::IteratePointersInStoreBuffer(
     ObjectSlotCallback slot_callback) {

(...skipping 126 matching lines...)

   heap_->isolate()->counters()->store_buffer_compactions()->Increment();
   CheckForFullBuffer();
 }
 
 
 void StoreBuffer::CheckForFullBuffer() {
   EnsureSpace(kStoreBufferSize * 2);
 }
 
 } }  // namespace v8::internal
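In the unchanged tail of the file, the compaction routine increments the store_buffer_compactions counter and then calls CheckForFullBuffer, which simply asks EnsureSpace for twice kStoreBufferSize of headroom right after compacting. EnsureSpace's body is outside this diff; the sketch below only illustrates the general check-compact-fallback shape such a capacity guard can take, with invented names and policy rather than V8's actual behavior.

#include <cassert>
#include <cstddef>

// Hypothetical capacity guard in the spirit of CheckForFullBuffer():
// after compacting a bounded buffer, demand generous headroom so the
// mutator cannot overflow it before the next check. Invented, not V8.
class BoundedBuffer {
 public:
  explicit BoundedBuffer(size_t capacity) : capacity_(capacity) {}
  void Add() { assert(used_ < capacity_); ++used_; }
  void EnsureSpace(size_t needed) {
    if (capacity_ - used_ >= needed) return;   // Enough headroom already.
    Compact();                                 // Try to reclaim slack first.
    if (capacity_ - used_ < needed) Drain();   // Last resort: empty it.
  }
 private:
  void Compact() { used_ -= used_ / 2; }       // Stand-in: dedupe entries.
  void Drain() { used_ = 0; }                  // Stand-in: process all entries.
  size_t used_ = 0;
  size_t capacity_;
};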
