Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1472 matching lines...) | |
| 1483 if (external_string_table_.old_space_strings_.length() > 0) { | 1483 if (external_string_table_.old_space_strings_.length() > 0) { |
| 1484 Object** start = &external_string_table_.old_space_strings_[0]; | 1484 Object** start = &external_string_table_.old_space_strings_[0]; |
| 1485 Object** end = start + external_string_table_.old_space_strings_.length(); | 1485 Object** end = start + external_string_table_.old_space_strings_.length(); |
| 1486 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1486 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
| 1487 } | 1487 } |
| 1488 | 1488 |
| 1489 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1489 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
| 1490 } | 1490 } |
| 1491 | 1491 |
| 1492 | 1492 |
| 1493 static Object* ProcessFunctionWeakReferences(Heap* heap, | 1493 template <class T> |
| 1494 Object* function, | 1494 struct WeakListVisitor; |
| 1495 WeakObjectRetainer* retainer, | 1495 |
| 1496 bool record_slots) { | 1496 |
| 1497 template <class T> | |
| 1498 static Object* VisitWeakList(Object* list, | |
| 1499 Heap* heap, | |
Michael Starzinger (2013/06/11 12:59:53):
nit: Indentation is off, also put the "record_slot...

Dmitry Lomov (no reviews) (2013/06/11 14:47:11):
Done.
| 1500 WeakObjectRetainer* retainer, bool record_slots) { | |
| 1497 Object* undefined = heap->undefined_value(); | 1501 Object* undefined = heap->undefined_value(); |
| 1498 Object* head = undefined; | 1502 Object* head = undefined; |
| 1499 JSFunction* tail = NULL; | 1503 T* tail = NULL; |
| 1500 Object* candidate = function; | 1504 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1501 while (candidate != undefined) { | 1505 while (list != undefined) { |
| 1502 // Check whether to keep the candidate in the list. | 1506 // Check whether to keep the candidate in the list. |
| 1503 JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate); | 1507 T* candidate = reinterpret_cast<T*>(list); |
| 1504 Object* retain = retainer->RetainAs(candidate); | 1508 Object* retained = retainer->RetainAs(list); |
| 1505 if (retain != NULL) { | 1509 if (retained != NULL) { |
| 1506 if (head == undefined) { | 1510 if (head == undefined) { |
| 1507 // First element in the list. | 1511 // First element in the list. |
| 1508 head = retain; | 1512 head = retained; |
| 1509 } else { | 1513 } else { |
| 1510 // Subsequent elements in the list. | 1514 // Subsequent elements in the list. |
| 1511 ASSERT(tail != NULL); | 1515 ASSERT(tail != NULL); |
| 1512 tail->set_next_function_link(retain); | 1516 WeakListVisitor<T>::set_weak_next(heap, tail, retained); |
| 1513 if (record_slots) { | 1517 if (record_slots) { |
| 1514 Object** next_function = | 1518 Object** next_slot = |
| 1515 HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset); | 1519 HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); |
| 1516 heap->mark_compact_collector()->RecordSlot( | 1520 collector->RecordSlot(next_slot, next_slot, retained); |
| 1517 next_function, next_function, retain); | |
| 1518 } | 1521 } |
| 1519 } | 1522 } |
| 1520 // Retained function is new tail. | 1523 // Retained object is new tail. |
| 1521 candidate_function = reinterpret_cast<JSFunction*>(retain); | 1524 ASSERT(!retained->IsUndefined()); |
| 1522 tail = candidate_function; | 1525 candidate = reinterpret_cast<T*>(retained); |
| 1526 tail = candidate; | |
| 1523 | 1527 |
| 1524 ASSERT(retain->IsUndefined() || retain->IsJSFunction()); | |
| 1525 | 1528 |
| 1526 if (retain == undefined) break; | 1529 // tail is a live object, visit it. |
| 1530 WeakListVisitor<T>::VisitLiveObject( | |
| 1531 tail, heap, retainer, record_slots); | |
| 1527 } | 1532 } |
| 1528 | 1533 |
| 1529 // Move to next element in the list. | 1534 // Move to next element in the list. |
| 1530 candidate = candidate_function->next_function_link(); | 1535 list = WeakListVisitor<T>::get_weak_next(candidate); |
| 1531 } | 1536 } |
| 1532 | 1537 |
| 1533 // Terminate the list if there is one or more elements. | 1538 // Terminate the list if there is one or more elements. |
| 1534 if (tail != NULL) { | 1539 if (tail != NULL) { |
| 1535 tail->set_next_function_link(undefined); | 1540 WeakListVisitor<T>::set_weak_next(heap, tail, undefined); |
| 1541 } | |
| 1542 return head; | |
| 1543 } | |
| 1544 | |
| 1545 | |
| 1546 template<> | |
| 1547 struct WeakListVisitor<JSFunction> { | |
| 1548 static void set_weak_next(Heap*, JSFunction* function, Object* next) { | |
| 1549 function->set_next_function_link(next); | |
| 1536 } | 1550 } |
| 1537 | 1551 |
| 1538 return head; | 1552 static Object* get_weak_next(JSFunction* function) { |
| 1539 } | 1553 return function->next_function_link(); |
| 1554 } | |
| 1555 | |
| 1556 static int WeakNextOffset() { | |
| 1557 return JSFunction::kNextFunctionLinkOffset; | |
| 1558 } | |
| 1559 | |
| 1560 static void VisitLiveObject(JSFunction*, Heap*, | |
| 1561 WeakObjectRetainer*, bool) { | |
| 1562 } | |
| 1563 }; | |
| 1564 | |
| 1565 | |
| 1566 template<> | |
| 1567 struct WeakListVisitor<Context> { | |
| 1568 static void set_weak_next(Heap* heap, Context* context, Object* next) { | |
| 1569 context->set_unchecked(heap, | |
Michael Starzinger (2013/06/11 12:59:53):
I think we can finally switch to use the checked a...

Dmitry Lomov (no reviews) (2013/06/11 14:47:11):
Done.
| 1570 Context::NEXT_CONTEXT_LINK, | |
| 1571 next, | |
| 1572 UPDATE_WRITE_BARRIER); | |
| 1573 } | |
| 1574 | |
| 1575 static Object* get_weak_next(Context* context) { | |
| 1576 return context->get(Context::NEXT_CONTEXT_LINK); | |
| 1577 } | |
| 1578 | |
| 1579 static void VisitLiveObject(Context* context, | |
| 1580 Heap* heap, | |
| 1581 WeakObjectRetainer* retainer, | |
| 1582 bool record_slots) { | |
| 1583 // Process the weak list of optimized functions for the context. | |
| 1584 Object* function_list_head = | |
| 1585 VisitWeakList<JSFunction>( | |
| 1586 context->get(Context::OPTIMIZED_FUNCTIONS_LIST), | |
| 1587 heap, | |
| 1588 retainer, | |
| 1589 record_slots); | |
| 1590 context->set_unchecked(heap, | |
| 1591 Context::OPTIMIZED_FUNCTIONS_LIST, | |
| 1592 function_list_head, | |
| 1593 UPDATE_WRITE_BARRIER); | |
| 1594 if (record_slots) { | |
| 1595 Object** optimized_functions = | |
| 1596 HeapObject::RawField( | |
| 1597 context, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 1598 heap->mark_compact_collector()->RecordSlot( | |
| 1599 optimized_functions, optimized_functions, function_list_head); | |
| 1600 } | |
| 1601 } | |
| 1602 | |
| 1603 static int WeakNextOffset() { | |
| 1604 return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); | |
| 1605 } | |
| 1606 }; | |
| 1540 | 1607 |
| 1541 | 1608 |
| 1542 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { | 1609 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { |
| 1543 // We don't record weak slots during marking or scavenges. | 1610 // We don't record weak slots during marking or scavenges. |
| 1544 // Instead we do it once when we complete mark-compact cycle. | 1611 // Instead we do it once when we complete mark-compact cycle. |
| 1545 // Note that write barrier has no effect if we are already in the middle of | 1612 // Note that write barrier has no effect if we are already in the middle of |
| 1546 // compacting mark-sweep cycle and we have to record slots manually. | 1613 // compacting mark-sweep cycle and we have to record slots manually. |
| 1547 bool record_slots = | 1614 bool record_slots = |
| 1548 gc_state() == MARK_COMPACT && | 1615 gc_state() == MARK_COMPACT && |
| 1549 mark_compact_collector()->is_compacting(); | 1616 mark_compact_collector()->is_compacting(); |
| 1550 ProcessArrayBuffers(retainer, record_slots); | 1617 ProcessArrayBuffers(retainer, record_slots); |
| 1551 ProcessNativeContexts(retainer, record_slots); | 1618 ProcessNativeContexts(retainer, record_slots); |
| 1552 } | 1619 } |
| 1553 | 1620 |
| 1554 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, | 1621 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, |
| 1555 bool record_slots) { | 1622 bool record_slots) { |
| 1556 Object* undefined = undefined_value(); | 1623 Object* head = |
| 1557 Object* head = undefined; | 1624 VisitWeakList<Context>( |
| 1558 Context* tail = NULL; | 1625 native_contexts_list(), this, retainer, record_slots); |
| 1559 Object* candidate = native_contexts_list_; | |
| 1560 | |
| 1561 while (candidate != undefined) { | |
| 1562 // Check whether to keep the candidate in the list. | |
| 1563 Context* candidate_context = reinterpret_cast<Context*>(candidate); | |
| 1564 Object* retain = retainer->RetainAs(candidate); | |
| 1565 if (retain != NULL) { | |
| 1566 if (head == undefined) { | |
| 1567 // First element in the list. | |
| 1568 head = retain; | |
| 1569 } else { | |
| 1570 // Subsequent elements in the list. | |
| 1571 ASSERT(tail != NULL); | |
| 1572 tail->set_unchecked(this, | |
| 1573 Context::NEXT_CONTEXT_LINK, | |
| 1574 retain, | |
| 1575 UPDATE_WRITE_BARRIER); | |
| 1576 | |
| 1577 if (record_slots) { | |
| 1578 Object** next_context = | |
| 1579 HeapObject::RawField( | |
| 1580 tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK)); | |
| 1581 mark_compact_collector()->RecordSlot( | |
| 1582 next_context, next_context, retain); | |
| 1583 } | |
| 1584 } | |
| 1585 // Retained context is new tail. | |
| 1586 candidate_context = reinterpret_cast<Context*>(retain); | |
| 1587 tail = candidate_context; | |
| 1588 | |
| 1589 if (retain == undefined) break; | |
| 1590 | |
| 1591 // Process the weak list of optimized functions for the context. | |
| 1592 Object* function_list_head = | |
| 1593 ProcessFunctionWeakReferences( | |
| 1594 this, | |
| 1595 candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST), | |
| 1596 retainer, | |
| 1597 record_slots); | |
| 1598 candidate_context->set_unchecked(this, | |
| 1599 Context::OPTIMIZED_FUNCTIONS_LIST, | |
| 1600 function_list_head, | |
| 1601 UPDATE_WRITE_BARRIER); | |
| 1602 if (record_slots) { | |
| 1603 Object** optimized_functions = | |
| 1604 HeapObject::RawField( | |
| 1605 tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 1606 mark_compact_collector()->RecordSlot( | |
| 1607 optimized_functions, optimized_functions, function_list_head); | |
| 1608 } | |
| 1609 } | |
| 1610 | |
| 1611 // Move to next element in the list. | |
| 1612 candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK); | |
| 1613 } | |
| 1614 | |
| 1615 // Terminate the list if there is one or more elements. | |
| 1616 if (tail != NULL) { | |
| 1617 tail->set_unchecked(this, | |
| 1618 Context::NEXT_CONTEXT_LINK, | |
| 1619 Heap::undefined_value(), | |
| 1620 UPDATE_WRITE_BARRIER); | |
| 1621 } | |
| 1622 | |
| 1623 // Update the head of the list of contexts. | 1626 // Update the head of the list of contexts. |
| 1624 native_contexts_list_ = head; | 1627 native_contexts_list_ = head; |
| 1625 } | 1628 } |
| 1626 | 1629 |
| 1627 | 1630 |
| 1628 template <class T> | |
| 1629 struct WeakListVisitor; | |
| 1630 | |
| 1631 | |
| 1632 template <class T> | |
| 1633 static Object* VisitWeakList(Object* list, | |
| 1634 MarkCompactCollector* collector, | |
| 1635 WeakObjectRetainer* retainer, bool record_slots) { | |
| 1636 Object* head = Smi::FromInt(0); | |
| 1637 T* tail = NULL; | |
| 1638 while (list != Smi::FromInt(0)) { | |
| 1639 Object* retained = retainer->RetainAs(list); | |
| 1640 if (retained != NULL) { | |
| 1641 if (head == Smi::FromInt(0)) { | |
| 1642 head = retained; | |
| 1643 } else { | |
| 1644 ASSERT(tail != NULL); | |
| 1645 WeakListVisitor<T>::set_weak_next(tail, retained); | |
| 1646 if (record_slots) { | |
| 1647 Object** next_slot = | |
| 1648 HeapObject::RawField(tail, WeakListVisitor<T>::kWeakNextOffset); | |
| 1649 collector->RecordSlot(next_slot, next_slot, retained); | |
| 1650 } | |
| 1651 } | |
| 1652 tail = reinterpret_cast<T*>(retained); | |
| 1653 WeakListVisitor<T>::VisitLiveObject( | |
| 1654 tail, collector, retainer, record_slots); | |
| 1655 } | |
| 1656 list = WeakListVisitor<T>::get_weak_next(reinterpret_cast<T*>(list)); | |
| 1657 } | |
| 1658 if (tail != NULL) { | |
| 1659 tail->set_weak_next(Smi::FromInt(0)); | |
| 1660 } | |
| 1661 return head; | |
| 1662 } | |
| 1663 | |
| 1664 | |
| 1665 template<> | 1631 template<> |
| 1666 struct WeakListVisitor<JSTypedArray> { | 1632 struct WeakListVisitor<JSTypedArray> { |
| 1667 static void set_weak_next(JSTypedArray* obj, Object* next) { | 1633 static void set_weak_next(Heap*, JSTypedArray* obj, Object* next) { |
Michael Starzinger (2013/06/11 12:59:53):
See my previous comment, I think we can drop the H...

Dmitry Lomov (no reviews) (2013/06/11 14:47:11):
Done.
| 1668 obj->set_weak_next(next); | 1634 obj->set_weak_next(next); |
| 1669 } | 1635 } |
| 1670 | 1636 |
| 1671 static Object* get_weak_next(JSTypedArray* obj) { | 1637 static Object* get_weak_next(JSTypedArray* obj) { |
| 1672 return obj->weak_next(); | 1638 return obj->weak_next(); |
| 1673 } | 1639 } |
| 1674 | 1640 |
| 1675 static void VisitLiveObject(JSTypedArray* obj, | 1641 static void VisitLiveObject(JSTypedArray* obj, |
| 1676 MarkCompactCollector* collector, | 1642 Heap* heap, |
Michael Starzinger (2013/06/11 12:59:53):
nit: Can we make the Heap pointer be the first arg...

Dmitry Lomov (no reviews) (2013/06/11 14:47:11):
Done.
| 1677 WeakObjectRetainer* retainer, | 1643 WeakObjectRetainer* retainer, |
| 1678 bool record_slots) {} | 1644 bool record_slots) {} |
| 1679 | 1645 |
| 1680 static const int kWeakNextOffset = JSTypedArray::kWeakNextOffset; | 1646 static int WeakNextOffset() { |
| 1647 return JSTypedArray::kWeakNextOffset; | |
| 1648 } | |
| 1681 }; | 1649 }; |
| 1682 | 1650 |
| 1683 | 1651 |
| 1684 template<> | 1652 template<> |
| 1685 struct WeakListVisitor<JSArrayBuffer> { | 1653 struct WeakListVisitor<JSArrayBuffer> { |
| 1686 static void set_weak_next(JSArrayBuffer* obj, Object* next) { | 1654 static void set_weak_next(Heap*, JSArrayBuffer* obj, Object* next) { |
| 1687 obj->set_weak_next(next); | 1655 obj->set_weak_next(next); |
| 1688 } | 1656 } |
| 1689 | 1657 |
| 1690 static Object* get_weak_next(JSArrayBuffer* obj) { | 1658 static Object* get_weak_next(JSArrayBuffer* obj) { |
| 1691 return obj->weak_next(); | 1659 return obj->weak_next(); |
| 1692 } | 1660 } |
| 1693 | 1661 |
| 1694 static void VisitLiveObject(JSArrayBuffer* array_buffer, | 1662 static void VisitLiveObject(JSArrayBuffer* array_buffer, |
| 1695 MarkCompactCollector* collector, | 1663 Heap* heap, |
| 1696 WeakObjectRetainer* retainer, | 1664 WeakObjectRetainer* retainer, |
| 1697 bool record_slots) { | 1665 bool record_slots) { |
| 1698 Object* typed_array_obj = | 1666 Object* typed_array_obj = |
| 1699 VisitWeakList<JSTypedArray>(array_buffer->weak_first_array(), | 1667 VisitWeakList<JSTypedArray>(array_buffer->weak_first_array(), |
| 1700 collector, retainer, record_slots); | 1668 heap, retainer, record_slots); |
| 1701 array_buffer->set_weak_first_array(typed_array_obj); | 1669 array_buffer->set_weak_first_array(typed_array_obj); |
| 1702 if (typed_array_obj != Smi::FromInt(0) && record_slots) { | 1670 if (typed_array_obj != heap->undefined_value() && record_slots) { |
| 1703 Object** slot = HeapObject::RawField( | 1671 Object** slot = HeapObject::RawField( |
| 1704 array_buffer, JSArrayBuffer::kWeakFirstArrayOffset); | 1672 array_buffer, JSArrayBuffer::kWeakFirstArrayOffset); |
| 1705 collector->RecordSlot(slot, slot, typed_array_obj); | 1673 heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); |
| 1706 } | 1674 } |
| 1707 } | 1675 } |
| 1708 | 1676 |
| 1709 static const int kWeakNextOffset = JSArrayBuffer::kWeakNextOffset; | 1677 static int WeakNextOffset() { |
| 1678 return JSArrayBuffer::kWeakNextOffset; | |
| 1679 } | |
| 1710 }; | 1680 }; |
| 1711 | 1681 |
| 1712 | 1682 |
| 1713 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1683 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
| 1714 bool record_slots) { | 1684 bool record_slots) { |
| 1715 Object* array_buffer_obj = | 1685 Object* array_buffer_obj = |
| 1716 VisitWeakList<JSArrayBuffer>(array_buffers_list(), | 1686 VisitWeakList<JSArrayBuffer>(array_buffers_list(), |
| 1717 mark_compact_collector(), | 1687 this, |
| 1718 retainer, record_slots); | 1688 retainer, record_slots); |
| 1719 set_array_buffers_list(array_buffer_obj); | 1689 set_array_buffers_list(array_buffer_obj); |
| 1720 } | 1690 } |
| 1721 | 1691 |
| 1722 | 1692 |
| 1723 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { | 1693 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { |
| 1724 DisallowHeapAllocation no_allocation; | 1694 DisallowHeapAllocation no_allocation; |
| 1725 | 1695 |
| 1726 // Both the external string table and the string table may contain | 1696 // Both the external string table and the string table may contain |
| 1727 // external strings, but neither lists them exhaustively, nor is the | 1697 // external strings, but neither lists them exhaustively, nor is the |
| (...skipping 5050 matching lines...) | |
| 6778 | 6748 |
| 6779 bool Heap::CreateHeapObjects() { | 6749 bool Heap::CreateHeapObjects() { |
| 6780 // Create initial maps. | 6750 // Create initial maps. |
| 6781 if (!CreateInitialMaps()) return false; | 6751 if (!CreateInitialMaps()) return false; |
| 6782 if (!CreateApiObjects()) return false; | 6752 if (!CreateApiObjects()) return false; |
| 6783 | 6753 |
| 6784 // Create initial objects | 6754 // Create initial objects |
| 6785 if (!CreateInitialObjects()) return false; | 6755 if (!CreateInitialObjects()) return false; |
| 6786 | 6756 |
| 6787 native_contexts_list_ = undefined_value(); | 6757 native_contexts_list_ = undefined_value(); |
| 6758 array_buffers_list_ = undefined_value(); | |
| 6788 return true; | 6759 return true; |
| 6789 } | 6760 } |
| 6790 | 6761 |
| 6791 | 6762 |
| 6792 void Heap::SetStackLimits() { | 6763 void Heap::SetStackLimits() { |
| 6793 ASSERT(isolate_ != NULL); | 6764 ASSERT(isolate_ != NULL); |
| 6794 ASSERT(isolate_ == isolate()); | 6765 ASSERT(isolate_ == isolate()); |
| 6795 // On 64 bit machines, pointers are generally out of range of Smis. We write | 6766 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 6796 // something that looks like an out of range Smi to the GC. | 6767 // something that looks like an out of range Smi to the GC. |
| 6797 | 6768 |
| (...skipping 1216 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 8014 if (FLAG_parallel_recompilation) { | 7985 if (FLAG_parallel_recompilation) { |
| 8015 heap_->relocation_mutex_->Lock(); | 7986 heap_->relocation_mutex_->Lock(); |
| 8016 #ifdef DEBUG | 7987 #ifdef DEBUG |
| 8017 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7988 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 8018 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7989 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 8019 #endif // DEBUG | 7990 #endif // DEBUG |
| 8020 } | 7991 } |
| 8021 } | 7992 } |
| 8022 | 7993 |
| 8023 } } // namespace v8::internal | 7994 } } // namespace v8::internal |
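For reference, the patch above replaces the two hand-rolled weak-list walks (ProcessFunctionWeakReferences and the inline context loop in ProcessNativeContexts) with a single templated VisitWeakList<T>, driven by per-type WeakListVisitor<T> specializations that supply set_weak_next, get_weak_next, WeakNextOffset, and VisitLiveObject. The sketch below is a minimal, standalone illustration of that trait-style pattern and is not V8 code: Node and the retain predicate are hypothetical stand-ins for V8's heap objects and WeakObjectRetainer, and the write-barrier and slot-recording logic (record_slots, RecordSlot) is deliberately omitted.

```cpp
// Minimal, standalone sketch of the WeakListVisitor<T> pattern (not V8 code).
#include <cassert>
#include <cstdio>

// Trait specialized per element type; it tells the generic walker how to
// read and write the intrusive "weak next" link of a list element.
template <class T>
struct WeakListVisitor;

// Walks an intrusive singly linked list, drops elements the retainer
// rejects (the analogue of WeakObjectRetainer::RetainAs returning NULL),
// relinks the survivors, and returns the new list head.
template <class T, class Retainer>
T* VisitWeakList(T* list, Retainer retain) {
  T* head = nullptr;
  T* tail = nullptr;
  while (list != nullptr) {
    T* next = WeakListVisitor<T>::get_weak_next(list);
    if (retain(list)) {
      if (head == nullptr) {
        head = list;  // first retained element becomes the new head
      } else {
        assert(tail != nullptr);
        WeakListVisitor<T>::set_weak_next(tail, list);
      }
      tail = list;  // retained element is the new tail
    }
    list = next;  // move to the next element in the original list
  }
  // Terminate the list if it has one or more elements.
  if (tail != nullptr) WeakListVisitor<T>::set_weak_next(tail, nullptr);
  return head;
}

// Hypothetical element type standing in for a heap object with a weak link.
struct Node {
  int value;
  Node* weak_next;
};

template <>
struct WeakListVisitor<Node> {
  static void set_weak_next(Node* n, Node* next) { n->weak_next = next; }
  static Node* get_weak_next(Node* n) { return n->weak_next; }
};

int main() {
  Node c{3, nullptr}, b{2, &c}, a{1, &b};  // list: 1 -> 2 -> 3
  // Retain only even values; odd nodes are dropped from the list.
  Node* head =
      VisitWeakList<Node>(&a, [](Node* n) { return n->value % 2 == 0; });
  for (Node* p = head; p != nullptr; p = p->weak_next) {
    std::printf("%d\n", p->value);
  }
  return 0;
}
```

Compiled and run, this prints only 2: the odd nodes are dropped, the surviving node becomes the new head, and the list is re-terminated with nullptr, mirroring how the patched VisitWeakList relinks tails and terminates the list with the heap's undefined value.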