OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1472 matching lines...) | |
1483 if (external_string_table_.old_space_strings_.length() > 0) { | 1483 if (external_string_table_.old_space_strings_.length() > 0) { |
1484 Object** start = &external_string_table_.old_space_strings_[0]; | 1484 Object** start = &external_string_table_.old_space_strings_[0]; |
1485 Object** end = start + external_string_table_.old_space_strings_.length(); | 1485 Object** end = start + external_string_table_.old_space_strings_.length(); |
1486 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1486 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
1487 } | 1487 } |
1488 | 1488 |
1489 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1489 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
1490 } | 1490 } |
1491 | 1491 |
1492 | 1492 |
1493 static Object* ProcessFunctionWeakReferences(Heap* heap, | 1493 template <class T> |
1494 Object* function, | 1494 struct WeakListVisitor; |
1495 WeakObjectRetainer* retainer, | 1495 |
1496 bool record_slots) { | 1496 |
1497 template <class T> | |
1498 static Object* VisitWeakList(Heap* heap, | |
1499 Object* list, | |
1500 WeakObjectRetainer* retainer, | |
1501 bool record_slots) { | |
1497 Object* undefined = heap->undefined_value(); | 1502 Object* undefined = heap->undefined_value(); |
1498 Object* head = undefined; | 1503 Object* head = undefined; |
1499 JSFunction* tail = NULL; | 1504 T* tail = NULL; |
1500 Object* candidate = function; | 1505 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1501 while (candidate != undefined) { | 1506 while (list != undefined) { |
1502 // Check whether to keep the candidate in the list. | 1507 // Check whether to keep the candidate in the list. |
1503 JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate); | 1508 T* candidate = reinterpret_cast<T*>(list); |
1504 Object* retain = retainer->RetainAs(candidate); | 1509 Object* retained = retainer->RetainAs(list); |
1505 if (retain != NULL) { | 1510 if (retained != NULL) { |
1506 if (head == undefined) { | 1511 if (head == undefined) { |
1507 // First element in the list. | 1512 // First element in the list. |
1508 head = retain; | 1513 head = retained; |
1509 } else { | 1514 } else { |
1510 // Subsequent elements in the list. | 1515 // Subsequent elements in the list. |
1511 ASSERT(tail != NULL); | 1516 ASSERT(tail != NULL); |
1512 tail->set_next_function_link(retain); | 1517 WeakListVisitor<T>::SetWeakNext(tail, retained); |
1513 if (record_slots) { | 1518 if (record_slots) { |
1514 Object** next_function = | 1519 Object** next_slot = |
1515 HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset); | 1520 HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); |
1516 heap->mark_compact_collector()->RecordSlot( | 1521 collector->RecordSlot(next_slot, next_slot, retained); |
1517 next_function, next_function, retain); | |
1518 } | 1522 } |
1519 } | 1523 } |
1520 // Retained function is new tail. | 1524 // Retained object is new tail. |
1521 candidate_function = reinterpret_cast<JSFunction*>(retain); | 1525 ASSERT(!retained->IsUndefined()); |
1522 tail = candidate_function; | 1526 candidate = reinterpret_cast<T*>(retained); |
1527 tail = candidate; | |
1523 | 1528 |
1524 ASSERT(retain->IsUndefined() || retain->IsJSFunction()); | |
1525 | 1529 |
1526 if (retain == undefined) break; | 1530 // tail is a live object, visit it. |
Hannes Payer (out of office) 2013/06/12 09:12:08: Tail
Dmitry Lomov (no reviews) 2013/06/12 09:22:59: 'tail' is a variable name here.
1531 WeakListVisitor<T>::VisitLiveObject( | |
1532 heap, tail, retainer, record_slots); | |
1527 } | 1533 } |
1528 | 1534 |
1529 // Move to next element in the list. | 1535 // Move to next element in the list. |
1530 candidate = candidate_function->next_function_link(); | 1536 list = WeakListVisitor<T>::WeakNext(candidate); |
1531 } | 1537 } |
1532 | 1538 |
1533 // Terminate the list if there are one or more elements. | 1539 // Terminate the list if there are one or more elements. |
1534 if (tail != NULL) { | 1540 if (tail != NULL) { |
1535 tail->set_next_function_link(undefined); | 1541 WeakListVisitor<T>::SetWeakNext(tail, undefined); |
1542 } | |
1543 return head; | |
1544 } | |
1545 | |
1546 | |
1547 template<> | |
1548 struct WeakListVisitor<JSFunction> { | |
1549 static void SetWeakNext(JSFunction* function, Object* next) { | |
1550 function->set_next_function_link(next); | |
1536 } | 1551 } |
1537 | 1552 |
1538 return head; | 1553 static Object* WeakNext(JSFunction* function) { |
1539 } | 1554 return function->next_function_link(); |
1555 } | |
1556 | |
1557 static int WeakNextOffset() { | |
1558 return JSFunction::kNextFunctionLinkOffset; | |
1559 } | |
1560 | |
1561 static void VisitLiveObject(Heap*, JSFunction*, | |
1562 WeakObjectRetainer*, bool) { | |
1563 } | |
1564 }; | |
1565 | |
1566 | |
1567 template<> | |
1568 struct WeakListVisitor<Context> { | |
1569 static void SetWeakNext(Context* context, Object* next) { | |
1570 context->set(Context::NEXT_CONTEXT_LINK, | |
1571 next, | |
1572 UPDATE_WRITE_BARRIER); | |
1573 } | |
1574 | |
1575 static Object* WeakNext(Context* context) { | |
1576 return context->get(Context::NEXT_CONTEXT_LINK); | |
1577 } | |
1578 | |
1579 static void VisitLiveObject(Heap* heap, | |
1580 Context* context, | |
1581 WeakObjectRetainer* retainer, | |
1582 bool record_slots) { | |
1583 // Process the weak list of optimized functions for the context. | |
1584 Object* function_list_head = | |
1585 VisitWeakList<JSFunction>( | |
1586 heap, | |
1587 context->get(Context::OPTIMIZED_FUNCTIONS_LIST), | |
1588 retainer, | |
1589 record_slots); | |
1590 context->set(Context::OPTIMIZED_FUNCTIONS_LIST, | |
1591 function_list_head, | |
1592 UPDATE_WRITE_BARRIER); | |
1593 if (record_slots) { | |
1594 Object** optimized_functions = | |
1595 HeapObject::RawField( | |
1596 context, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST)); | |
1597 heap->mark_compact_collector()->RecordSlot( | |
1598 optimized_functions, optimized_functions, function_list_head); | |
1599 } | |
1600 } | |
1601 | |
1602 static int WeakNextOffset() { | |
1603 return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); | |
1604 } | |
1605 }; | |
1540 | 1606 |
1541 | 1607 |
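For readers skimming this CL: the new code above replaces the hand-rolled JSFunction and Context loops with a single VisitWeakList<T> driver parameterized by a WeakListVisitor<T> trait (SetWeakNext, WeakNext, WeakNextOffset, VisitLiveObject). Below is a minimal, self-contained sketch of that pattern; the HeapObj, Node, and KeepEven names are hypothetical stand-ins for illustration, not V8 APIs, and the sketch omits the record_slots/RecordSlot plumbing (which is what WeakNextOffset exists for in the real trait).

```cpp
// Stand-alone sketch of the WeakListVisitor/VisitWeakList pattern in this CL.
// HeapObj, Node, KeepEven and main() are hypothetical; only the shape of the
// trait (SetWeakNext/WeakNext/VisitLiveObject) mirrors the real code.
#include <cstdio>

struct HeapObj { virtual ~HeapObj() {} };

// The retainer decides which elements survive; NULL means "drop this one".
struct WeakObjectRetainer {
  virtual HeapObj* RetainAs(HeapObj* obj) = 0;
  virtual ~WeakObjectRetainer() {}
};

struct Node : HeapObj {
  int value;
  Node* weak_next;
};

// Per-type trait telling the generic walker how to follow and relink the list.
template <class T> struct WeakListVisitor;

template <> struct WeakListVisitor<Node> {
  static void SetWeakNext(Node* n, Node* next) { n->weak_next = next; }
  static Node* WeakNext(Node* n) { return n->weak_next; }
  static void VisitLiveObject(Node*, WeakObjectRetainer*) {}  // no inner list
};

// Simplified VisitWeakList: drops dead elements and relinks the survivors.
// The real version also records updated slots for the compacting collector.
template <class T>
static T* VisitWeakList(T* list, WeakObjectRetainer* retainer) {
  T* head = NULL;
  T* tail = NULL;
  while (list != NULL) {
    T* retained = static_cast<T*>(retainer->RetainAs(list));
    if (retained != NULL) {
      if (head == NULL) {
        head = retained;                                  // first live element
      } else {
        WeakListVisitor<T>::SetWeakNext(tail, retained);  // skip dropped ones
      }
      tail = retained;
      WeakListVisitor<T>::VisitLiveObject(tail, retainer);
    }
    list = WeakListVisitor<T>::WeakNext(list);
  }
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, NULL);  // terminate
  return head;
}

// Toy retainer: keep only even values.
struct KeepEven : WeakObjectRetainer {
  virtual HeapObj* RetainAs(HeapObj* obj) {
    Node* n = static_cast<Node*>(obj);
    return (n->value % 2 == 0) ? obj : NULL;
  }
};

int main() {
  Node a, b, c;
  a.value = 1; a.weak_next = &b;
  b.value = 2; b.weak_next = &c;
  c.value = 3; c.weak_next = NULL;
  KeepEven keep_even;
  for (Node* n = VisitWeakList<Node>(&a, &keep_even); n != NULL; n = n->weak_next)
    std::printf("%d\n", n->value);  // prints: 2
  return 0;
}
```

Adding support for a new weak-listed type then reduces to writing one more WeakListVisitor specialization, which is exactly what the Context, JSTypedArray and JSArrayBuffer specializations in this CL do.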
1542 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { | 1608 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { |
1543 // We don't record weak slots during marking or scavenges. | 1609 // We don't record weak slots during marking or scavenges. |
1544 // Instead we do it once when we complete mark-compact cycle. | 1610 // Instead we do it once when we complete mark-compact cycle. |
1545 // Note that write barrier has no effect if we are already in the middle of | 1611 // Note that write barrier has no effect if we are already in the middle of |
1546 // compacting mark-sweep cycle and we have to record slots manually. | 1612 // compacting mark-sweep cycle and we have to record slots manually. |
1547 bool record_slots = | 1613 bool record_slots = |
1548 gc_state() == MARK_COMPACT && | 1614 gc_state() == MARK_COMPACT && |
1549 mark_compact_collector()->is_compacting(); | 1615 mark_compact_collector()->is_compacting(); |
1550 ProcessArrayBuffers(retainer, record_slots); | 1616 ProcessArrayBuffers(retainer, record_slots); |
1551 ProcessNativeContexts(retainer, record_slots); | 1617 ProcessNativeContexts(retainer, record_slots); |
1552 } | 1618 } |
1553 | 1619 |
1554 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, | 1620 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, |
1555 bool record_slots) { | 1621 bool record_slots) { |
1556 Object* undefined = undefined_value(); | 1622 Object* head = |
1557 Object* head = undefined; | 1623 VisitWeakList<Context>( |
1558 Context* tail = NULL; | 1624 this, native_contexts_list(), retainer, record_slots); |
1559 Object* candidate = native_contexts_list_; | |
1560 | |
1561 while (candidate != undefined) { | |
1562 // Check whether to keep the candidate in the list. | |
1563 Context* candidate_context = reinterpret_cast<Context*>(candidate); | |
1564 Object* retain = retainer->RetainAs(candidate); | |
1565 if (retain != NULL) { | |
1566 if (head == undefined) { | |
1567 // First element in the list. | |
1568 head = retain; | |
1569 } else { | |
1570 // Subsequent elements in the list. | |
1571 ASSERT(tail != NULL); | |
1572 tail->set_unchecked(this, | |
1573 Context::NEXT_CONTEXT_LINK, | |
1574 retain, | |
1575 UPDATE_WRITE_BARRIER); | |
1576 | |
1577 if (record_slots) { | |
1578 Object** next_context = | |
1579 HeapObject::RawField( | |
1580 tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK)); | |
1581 mark_compact_collector()->RecordSlot( | |
1582 next_context, next_context, retain); | |
1583 } | |
1584 } | |
1585 // Retained context is new tail. | |
1586 candidate_context = reinterpret_cast<Context*>(retain); | |
1587 tail = candidate_context; | |
1588 | |
1589 if (retain == undefined) break; | |
1590 | |
1591 // Process the weak list of optimized functions for the context. | |
1592 Object* function_list_head = | |
1593 ProcessFunctionWeakReferences( | |
1594 this, | |
1595 candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST), | |
1596 retainer, | |
1597 record_slots); | |
1598 candidate_context->set_unchecked(this, | |
1599 Context::OPTIMIZED_FUNCTIONS_LIST, | |
1600 function_list_head, | |
1601 UPDATE_WRITE_BARRIER); | |
1602 if (record_slots) { | |
1603 Object** optimized_functions = | |
1604 HeapObject::RawField( | |
1605 tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST)); | |
1606 mark_compact_collector()->RecordSlot( | |
1607 optimized_functions, optimized_functions, function_list_head); | |
1608 } | |
1609 } | |
1610 | |
1611 // Move to next element in the list. | |
1612 candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK); | |
1613 } | |
1614 | |
1615 // Terminate the list if there is one or more elements. | |
1616 if (tail != NULL) { | |
1617 tail->set_unchecked(this, | |
1618 Context::NEXT_CONTEXT_LINK, | |
1619 Heap::undefined_value(), | |
1620 UPDATE_WRITE_BARRIER); | |
1621 } | |
1622 | |
1623 // Update the head of the list of contexts. | 1625 // Update the head of the list of contexts. |
1624 native_contexts_list_ = head; | 1626 native_contexts_list_ = head; |
1625 } | 1627 } |
1626 | 1628 |
1627 | 1629 |
1628 template <class T> | |
1629 struct WeakListVisitor; | |
1630 | |
1631 | |
1632 template <class T> | |
1633 static Object* VisitWeakList(Object* list, | |
1634 MarkCompactCollector* collector, | |
1635 WeakObjectRetainer* retainer, bool record_slots) { | |
1636 Object* head = Smi::FromInt(0); | |
1637 T* tail = NULL; | |
1638 while (list != Smi::FromInt(0)) { | |
1639 Object* retained = retainer->RetainAs(list); | |
1640 if (retained != NULL) { | |
1641 if (head == Smi::FromInt(0)) { | |
1642 head = retained; | |
1643 } else { | |
1644 ASSERT(tail != NULL); | |
1645 WeakListVisitor<T>::set_weak_next(tail, retained); | |
1646 if (record_slots) { | |
1647 Object** next_slot = | |
1648 HeapObject::RawField(tail, WeakListVisitor<T>::kWeakNextOffset); | |
1649 collector->RecordSlot(next_slot, next_slot, retained); | |
1650 } | |
1651 } | |
1652 tail = reinterpret_cast<T*>(retained); | |
1653 WeakListVisitor<T>::VisitLiveObject( | |
1654 tail, collector, retainer, record_slots); | |
1655 } | |
1656 list = WeakListVisitor<T>::get_weak_next(reinterpret_cast<T*>(list)); | |
1657 } | |
1658 if (tail != NULL) { | |
1659 tail->set_weak_next(Smi::FromInt(0)); | |
1660 } | |
1661 return head; | |
1662 } | |
1663 | |
1664 | |
1665 template<> | 1630 template<> |
1666 struct WeakListVisitor<JSTypedArray> { | 1631 struct WeakListVisitor<JSTypedArray> { |
1667 static void set_weak_next(JSTypedArray* obj, Object* next) { | 1632 static void SetWeakNext(JSTypedArray* obj, Object* next) { |
1668 obj->set_weak_next(next); | 1633 obj->set_weak_next(next); |
1669 } | 1634 } |
1670 | 1635 |
1671 static Object* get_weak_next(JSTypedArray* obj) { | 1636 static Object* WeakNext(JSTypedArray* obj) { |
1672 return obj->weak_next(); | 1637 return obj->weak_next(); |
1673 } | 1638 } |
1674 | 1639 |
1675 static void VisitLiveObject(JSTypedArray* obj, | 1640 static void VisitLiveObject(Heap*, |
1676 MarkCompactCollector* collector, | 1641 JSTypedArray* obj, |
1677 WeakObjectRetainer* retainer, | 1642 WeakObjectRetainer* retainer, |
1678 bool record_slots) {} | 1643 bool record_slots) {} |
1679 | 1644 |
1680 static const int kWeakNextOffset = JSTypedArray::kWeakNextOffset; | 1645 static int WeakNextOffset() { |
1646 return JSTypedArray::kWeakNextOffset; | |
1647 } | |
1681 }; | 1648 }; |
1682 | 1649 |
1683 | 1650 |
1684 template<> | 1651 template<> |
1685 struct WeakListVisitor<JSArrayBuffer> { | 1652 struct WeakListVisitor<JSArrayBuffer> { |
1686 static void set_weak_next(JSArrayBuffer* obj, Object* next) { | 1653 static void SetWeakNext(JSArrayBuffer* obj, Object* next) { |
1687 obj->set_weak_next(next); | 1654 obj->set_weak_next(next); |
1688 } | 1655 } |
1689 | 1656 |
1690 static Object* get_weak_next(JSArrayBuffer* obj) { | 1657 static Object* WeakNext(JSArrayBuffer* obj) { |
1691 return obj->weak_next(); | 1658 return obj->weak_next(); |
1692 } | 1659 } |
1693 | 1660 |
1694 static void VisitLiveObject(JSArrayBuffer* array_buffer, | 1661 static void VisitLiveObject(Heap* heap, |
1695 MarkCompactCollector* collector, | 1662 JSArrayBuffer* array_buffer, |
1696 WeakObjectRetainer* retainer, | 1663 WeakObjectRetainer* retainer, |
1697 bool record_slots) { | 1664 bool record_slots) { |
1698 Object* typed_array_obj = | 1665 Object* typed_array_obj = |
1699 VisitWeakList<JSTypedArray>(array_buffer->weak_first_array(), | 1666 VisitWeakList<JSTypedArray>( |
1700 collector, retainer, record_slots); | 1667 heap, |
1668 array_buffer->weak_first_array(), | |
1669 retainer, record_slots); | |
1701 array_buffer->set_weak_first_array(typed_array_obj); | 1670 array_buffer->set_weak_first_array(typed_array_obj); |
1702 if (typed_array_obj != Smi::FromInt(0) && record_slots) { | 1671 if (typed_array_obj != heap->undefined_value() && record_slots) { |
1703 Object** slot = HeapObject::RawField( | 1672 Object** slot = HeapObject::RawField( |
1704 array_buffer, JSArrayBuffer::kWeakFirstArrayOffset); | 1673 array_buffer, JSArrayBuffer::kWeakFirstArrayOffset); |
1705 collector->RecordSlot(slot, slot, typed_array_obj); | 1674 heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); |
1706 } | 1675 } |
1707 } | 1676 } |
1708 | 1677 |
1709 static const int kWeakNextOffset = JSArrayBuffer::kWeakNextOffset; | 1678 static int WeakNextOffset() { |
1679 return JSArrayBuffer::kWeakNextOffset; | |
1680 } | |
1710 }; | 1681 }; |
1711 | 1682 |
1712 | 1683 |
1713 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1684 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
1714 bool record_slots) { | 1685 bool record_slots) { |
1715 Object* array_buffer_obj = | 1686 Object* array_buffer_obj = |
1716 VisitWeakList<JSArrayBuffer>(array_buffers_list(), | 1687 VisitWeakList<JSArrayBuffer>(this, |
1717 mark_compact_collector(), | 1688 array_buffers_list(), |
1718 retainer, record_slots); | 1689 retainer, record_slots); |
1719 set_array_buffers_list(array_buffer_obj); | 1690 set_array_buffers_list(array_buffer_obj); |
1720 } | 1691 } |
1721 | 1692 |
1722 | 1693 |
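The JSArrayBuffer specialization above also shows why VisitLiveObject receives the heap and retainer: a live element can prune an inner weak list of its own (here the buffer's chain of typed-array views). Below is a compact stand-alone sketch of that nesting; Buffer, View and the alive flag are hypothetical illustrations, not V8 classes, and slot recording is again omitted.

```cpp
// Stand-alone sketch of nesting one weak list inside another, mirroring the
// JSArrayBuffer -> JSTypedArray handling in this CL. Buffer, View and the
// "alive" flag are hypothetical illustrations, not V8 classes.
#include <cstdio>

struct View   { bool alive; int id; View* weak_next; };
struct Buffer { bool alive; int id; Buffer* weak_next; View* weak_first_view; };

// Plays the role of WeakObjectRetainer: NULL means "drop this element".
template <class T> static T* RetainAs(T* obj) { return obj->alive ? obj : NULL; }

template <class T> struct WeakListVisitor;

// Same traversal as in the previous sketch, minus slot recording.
template <class T>
static T* VisitWeakList(T* list) {
  T* head = NULL;
  T* tail = NULL;
  while (list != NULL) {
    T* retained = RetainAs(list);
    if (retained != NULL) {
      if (head == NULL) head = retained;
      else WeakListVisitor<T>::SetWeakNext(tail, retained);
      tail = retained;
      WeakListVisitor<T>::VisitLiveObject(tail);  // may walk an inner list
    }
    list = WeakListVisitor<T>::WeakNext(list);
  }
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, NULL);
  return head;
}

template <> struct WeakListVisitor<View> {
  static void SetWeakNext(View* v, View* next) { v->weak_next = next; }
  static View* WeakNext(View* v) { return v->weak_next; }
  static void VisitLiveObject(View*) {}
};

template <> struct WeakListVisitor<Buffer> {
  static void SetWeakNext(Buffer* b, Buffer* next) { b->weak_next = next; }
  static Buffer* WeakNext(Buffer* b) { return b->weak_next; }
  // A surviving buffer prunes its own weak list of views, just as
  // WeakListVisitor<JSArrayBuffer>::VisitLiveObject does above.
  static void VisitLiveObject(Buffer* b) {
    b->weak_first_view = VisitWeakList<View>(b->weak_first_view);
  }
};

int main() {
  View v2 = {true, 12, NULL};
  View v1 = {false, 11, &v2};           // this view is dead
  Buffer b1 = {true, 1, NULL, &v1};
  Buffer* head = VisitWeakList<Buffer>(&b1);
  std::printf("buffer %d keeps view %d\n", head->id, head->weak_first_view->id);
  return 0;
}
```

In the CL itself this nesting is the call from WeakListVisitor<JSArrayBuffer>::VisitLiveObject into VisitWeakList<JSTypedArray>, plus the extra RecordSlot on kWeakFirstArrayOffset when the collector is compacting.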
1723 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { | 1694 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { |
1724 DisallowHeapAllocation no_allocation; | 1695 DisallowHeapAllocation no_allocation; |
1725 | 1696 |
1726 // Both the external string table and the string table may contain | 1697 // Both the external string table and the string table may contain |
1727 // external strings, but neither lists them exhaustively, nor is the | 1698 // external strings, but neither lists them exhaustively, nor is the |
(...skipping 5050 matching lines...) | |
6778 | 6749 |
6779 bool Heap::CreateHeapObjects() { | 6750 bool Heap::CreateHeapObjects() { |
6780 // Create initial maps. | 6751 // Create initial maps. |
6781 if (!CreateInitialMaps()) return false; | 6752 if (!CreateInitialMaps()) return false; |
6782 if (!CreateApiObjects()) return false; | 6753 if (!CreateApiObjects()) return false; |
6783 | 6754 |
6784 // Create initial objects | 6755 // Create initial objects |
6785 if (!CreateInitialObjects()) return false; | 6756 if (!CreateInitialObjects()) return false; |
6786 | 6757 |
6787 native_contexts_list_ = undefined_value(); | 6758 native_contexts_list_ = undefined_value(); |
6759 array_buffers_list_ = undefined_value(); | |
6788 return true; | 6760 return true; |
6789 } | 6761 } |
6790 | 6762 |
6791 | 6763 |
6792 void Heap::SetStackLimits() { | 6764 void Heap::SetStackLimits() { |
6793 ASSERT(isolate_ != NULL); | 6765 ASSERT(isolate_ != NULL); |
6794 ASSERT(isolate_ == isolate()); | 6766 ASSERT(isolate_ == isolate()); |
6795 // On 64 bit machines, pointers are generally out of range of Smis. We write | 6767 // On 64 bit machines, pointers are generally out of range of Smis. We write |
6796 // something that looks like an out of range Smi to the GC. | 6768 // something that looks like an out of range Smi to the GC. |
6797 | 6769 |
(...skipping 1216 matching lines...) | |
8014 if (FLAG_parallel_recompilation) { | 7986 if (FLAG_parallel_recompilation) { |
8015 heap_->relocation_mutex_->Lock(); | 7987 heap_->relocation_mutex_->Lock(); |
8016 #ifdef DEBUG | 7988 #ifdef DEBUG |
8017 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7989 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8018 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7990 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8019 #endif // DEBUG | 7991 #endif // DEBUG |
8020 } | 7992 } |
8021 } | 7993 } |
8022 | 7994 |
8023 } } // namespace v8::internal | 7995 } } // namespace v8::internal |