| OLD | NEW |
| 1 // Copyright 2006-2010 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1699 matching lines...) |
| 1710 ASSERT(bytes <= InitialCapacity()); | 1710 ASSERT(bytes <= InitialCapacity()); |
| 1711 Address limit = allocation_info_.limit; | 1711 Address limit = allocation_info_.limit; |
| 1712 Address top = allocation_info_.top; | 1712 Address top = allocation_info_.top; |
| 1713 return limit - top >= bytes; | 1713 return limit - top >= bytes; |
| 1714 } | 1714 } |
| 1715 | 1715 |
| 1716 | 1716 |
| 1717 void PagedSpace::FreePages(Page* prev, Page* last) { | 1717 void PagedSpace::FreePages(Page* prev, Page* last) { |
| 1718 if (last == AllocationTopPage()) { | 1718 if (last == AllocationTopPage()) { |
| 1719 // Pages are already at the end of used pages. | 1719 // Pages are already at the end of used pages. |
| 1720 // Just mark them as continuos. | 1720 // Just mark them as continuous. |
| 1721 Page* p = prev == NULL ? first_page_ : prev->next_page(); | 1721 Page* p = prev == NULL ? first_page_ : prev->next_page(); |
| 1722 Page* end_page = last->next_page(); | 1722 Page* end_page = last->next_page(); |
| 1723 do { | 1723 do { |
| 1724 p->SetFlag(Page::IS_CONTINUOUS); | 1724 p->SetFlag(Page::IS_CONTINUOUS); |
| 1725 p = p->next_page(); | 1725 p = p->next_page(); |
| 1726 } while (p != end_page); | 1726 } while (p != end_page); |
| 1727 return; | 1727 return; |
| 1728 } | 1728 } |
| 1729 | 1729 |
| 1730 Page* first = NULL; | 1730 Page* first = NULL; |
| (...skipping 10 matching lines...) |
| 1741 // Attach it after the last page. | 1741 // Attach it after the last page. |
| 1742 last_page_->set_next_page(first); | 1742 last_page_->set_next_page(first); |
| 1743 last_page_ = last; | 1743 last_page_ = last; |
| 1744 last->set_next_page(NULL); | 1744 last->set_next_page(NULL); |
| 1745 | 1745 |
| 1746 // Clean them up. | 1746 // Clean them up. |
| 1747 do { | 1747 do { |
| 1748 first->InvalidateWatermark(true); | 1748 first->InvalidateWatermark(true); |
| 1749 first->SetAllocationWatermark(first->ObjectAreaStart()); | 1749 first->SetAllocationWatermark(first->ObjectAreaStart()); |
| 1750 first->SetCachedAllocationWatermark(first->ObjectAreaStart()); | 1750 first->SetCachedAllocationWatermark(first->ObjectAreaStart()); |
| 1751 first->SetRegionMarks(Page::kAllRegionsCleanMarks); | |
| 1752 first->SetFlag(Page::IS_CONTINUOUS); | 1751 first->SetFlag(Page::IS_CONTINUOUS); |
| 1753 first->markbits()->Clear(); | 1752 first->markbits()->Clear(); |
| 1754 first = first->next_page(); | 1753 first = first->next_page(); |
| 1755 } while (first->is_valid()); | 1754 } while (first->is_valid()); |
| 1756 } | 1755 } |
| 1757 | 1756 |
| 1758 | 1757 |
| 1759 void PagedSpace::PrepareForMarkCompact(bool will_compact) { | 1758 void PagedSpace::PrepareForMarkCompact(bool will_compact) { |
| 1760 ASSERT(!will_compact); | 1759 ASSERT(!will_compact); |
| 1761 } | 1760 } |
| (...skipping 592 matching lines...) |
| 2354 chunk = chunk->next_page()) { | 2353 chunk = chunk->next_page()) { |
| 2355 Address chunk_address = chunk->address(); | 2354 Address chunk_address = chunk->address(); |
| 2356 if (chunk_address <= pc && pc < chunk_address + chunk->size()) { | 2355 if (chunk_address <= pc && pc < chunk_address + chunk->size()) { |
| 2357 return chunk; | 2356 return chunk; |
| 2358 } | 2357 } |
| 2359 } | 2358 } |
| 2360 return NULL; | 2359 return NULL; |
| 2361 } | 2360 } |
| 2362 | 2361 |
| 2363 | 2362 |
| 2364 void LargeObjectSpace::IterateDirtyRegions(ObjectSlotCallback copy_object) { | 2363 void LargeObjectSpace::IteratePointersToNewSpace( |
| | 2364 ObjectSlotCallback copy_object) { |
| 2365 LargeObjectIterator it(this); | 2365 LargeObjectIterator it(this); |
| 2366 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { | 2366 for (HeapObject* object = it.next(); object != NULL; object = it.next()) { |
| 2367 // We only have code, sequential strings, or fixed arrays in large | 2367 // We only have code, sequential strings, or fixed arrays in large |
| 2368 // object space, and only fixed arrays can possibly contain pointers to | 2368 // object space, and only fixed arrays can possibly contain pointers to |
| 2369 // the young generation. | 2369 // the young generation. |
| 2370 if (object->IsFixedArray()) { | 2370 if (object->IsFixedArray()) { |
| 2371 // TODO(gc): we can no longer assume that LargePage is bigger than normal | 2371 // TODO(gc): we can no longer assume that LargePage is bigger than normal |
| 2372 // page. | 2372 // page. |
| 2373 | 2373 |
| 2374 Address start = object->address(); | 2374 Address start = object->address(); |
| 2375 Address object_end = start + object->Size(); | 2375 Address object_end = start + object->Size(); |
| 2376 Heap::IteratePointersInDirtyRegion(start, object_end, copy_object); | 2376 Heap::IteratePointersToNewSpace(start, object_end, copy_object); |
| 2377 } | 2377 } |
| 2378 } | 2378 } |
| 2379 } | 2379 } |
| 2380 | 2380 |
| 2381 | 2381 |
| 2382 void LargeObjectSpace::FreeUnmarkedObjects() { | 2382 void LargeObjectSpace::FreeUnmarkedObjects() { |
| 2383 LargePage* previous = NULL; | 2383 LargePage* previous = NULL; |
| 2384 LargePage* current = first_page_; | 2384 LargePage* current = first_page_; |
| 2385 while (current != NULL) { | 2385 while (current != NULL) { |
| 2386 HeapObject* object = current->GetObject(); | 2386 HeapObject* object = current->GetObject(); |
| (...skipping 69 matching lines...) |
| 2456 // The object itself should look OK. | 2456 // The object itself should look OK. |
| 2457 object->Verify(); | 2457 object->Verify(); |
| 2458 | 2458 |
| 2459 // Byte arrays and strings don't have interior pointers. | 2459 // Byte arrays and strings don't have interior pointers. |
| 2460 if (object->IsCode()) { | 2460 if (object->IsCode()) { |
| 2461 VerifyPointersVisitor code_visitor; | 2461 VerifyPointersVisitor code_visitor; |
| 2462 object->IterateBody(map->instance_type(), | 2462 object->IterateBody(map->instance_type(), |
| 2463 object->Size(), | 2463 object->Size(), |
| 2464 &code_visitor); | 2464 &code_visitor); |
| 2465 } else if (object->IsFixedArray()) { | 2465 } else if (object->IsFixedArray()) { |
| 2466 // We loop over fixed arrays ourselves, rather then using the visitor, | |
| 2467 // because the visitor doesn't support the start/offset iteration | |
| 2468 // needed for IsRegionDirty. | |
| 2469 FixedArray* array = FixedArray::cast(object); | 2466 FixedArray* array = FixedArray::cast(object); |
| 2470 for (int j = 0; j < array->length(); j++) { | 2467 for (int j = 0; j < array->length(); j++) { |
| 2471 Object* element = array->get(j); | 2468 Object* element = array->get(j); |
| 2472 if (element->IsHeapObject()) { | 2469 if (element->IsHeapObject()) { |
| 2473 HeapObject* element_object = HeapObject::cast(element); | 2470 HeapObject* element_object = HeapObject::cast(element); |
| 2474 ASSERT(Heap::Contains(element_object)); | 2471 ASSERT(Heap::Contains(element_object)); |
| 2475 ASSERT(element_object->map()->IsMap()); | 2472 ASSERT(element_object->map()->IsMap()); |
| 2476 if (Heap::InNewSpace(element_object)) { | |
| 2477 Address array_addr = object->address(); | |
| 2478 Address element_addr = array_addr + FixedArray::kHeaderSize + | |
| 2479 j * kPointerSize; | |
| 2480 | |
| 2481 ASSERT(Page::FromAddress(array_addr)->IsRegionDirty(element_addr)); | |
| 2482 } | |
| 2483 } | 2473 } |
| 2484 } | 2474 } |
| 2485 } | 2475 } |
| 2486 } | 2476 } |
| 2487 } | 2477 } |
| 2488 | 2478 |
| 2489 | 2479 |
| 2490 void LargeObjectSpace::Print() { | 2480 void LargeObjectSpace::Print() { |
| 2491 LargeObjectIterator it(this); | 2481 LargeObjectIterator it(this); |
| 2492 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { | 2482 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { |
| (...skipping 23 matching lines...) |
| 2516 for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) { | 2506 for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) { |
| 2517 if (obj->IsCode()) { | 2507 if (obj->IsCode()) { |
| 2518 Code* code = Code::cast(obj); | 2508 Code* code = Code::cast(obj); |
| 2519 code_kind_statistics[code->kind()] += code->Size(); | 2509 code_kind_statistics[code->kind()] += code->Size(); |
| 2520 } | 2510 } |
| 2521 } | 2511 } |
| 2522 } | 2512 } |
| 2523 #endif // DEBUG | 2513 #endif // DEBUG |
| 2524 | 2514 |
| 2525 } } // namespace v8::internal | 2515 } } // namespace v8::internal |