Chromium Code Reviews
Side by Side Diff: src/heap.cc

Issue 7863: - Optimized CopyFixedArray and CopyJSObject. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 12 years, 2 months ago
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 708 matching lines...)
719 if (new_space_.Contains(obj)) { 719 if (new_space_.Contains(obj)) {
720 new_space_.RecordAllocation(obj); 720 new_space_.RecordAllocation(obj);
721 } else { 721 } else {
722 new_space_.RecordPromotion(obj); 722 new_space_.RecordPromotion(obj);
723 } 723 }
724 } 724 }
725 } 725 }
726 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 726 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
727 727
728 728
729
729 HeapObject* Heap::MigrateObject(HeapObject* source, 730 HeapObject* Heap::MigrateObject(HeapObject* source,
730 HeapObject* target, 731 HeapObject* target,
731 int size) { 732 int size) {
OLD (lines 732-748):
732   void** src = reinterpret_cast<void**>(source->address());
733   void** dst = reinterpret_cast<void**>(target->address());
734
735   // Use block copying memcpy if the object we're migrating is big
736   // enough to justify the extra call/setup overhead.
737   static const int kBlockCopyLimit = 16 * kPointerSize;
738
739   if (size >= kBlockCopyLimit) {
740     memcpy(dst, src, size);
741   } else {
742     int remaining = size / kPointerSize;
743     do {
744       remaining--;
745       *dst++ = *src++;
746     } while (remaining > 0);
747   }
748
NEW (lines 733-737):
733   // Copy the content of source to target.
734   CopyBlock(reinterpret_cast<Object**>(target->address()),
735             reinterpret_cast<Object**>(source->address()),
736             size);
737
749 // Set the forwarding address. 738 // Set the forwarding address.
750 source->set_map_word(MapWord::FromForwardingAddress(target)); 739 source->set_map_word(MapWord::FromForwardingAddress(target));
751 740
752 // Update NewSpace stats if necessary. 741 // Update NewSpace stats if necessary.
753 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 742 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
754 RecordCopiedObject(target); 743 RecordCopiedObject(target);
755 #endif 744 #endif
756 745
757 return target; 746 return target;
(...skipping 824 matching lines...)
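Note on CopyBlock: both the inlined copy loop removed from MigrateObject (old lines 735-747) and the memcpy removed from CopyCode below are replaced by calls to a CopyBlock helper that is not shown in this file's diff (it presumably lives in src/heap-inl.h, which is also part of this change). A minimal sketch of what such a helper could look like, reconstructed from the removed inline code rather than taken from the patch:

// Sketch only: reconstructed from the inlined logic that MigrateObject used
// to contain; the committed helper (and its exact signature) may differ.
// Copies byte_size bytes of pointer-aligned data from src to dst, switching
// to memcpy once the block is big enough to justify the call/setup overhead.
static inline void CopyBlock(Object** dst, Object** src, int byte_size) {
  static const int kBlockCopyLimit = 16 * kPointerSize;  // limit carried over from the old code
  if (byte_size >= kBlockCopyLimit) {
    memcpy(dst, src, byte_size);
  } else {
    int remaining = byte_size / kPointerSize;
    do {
      remaining--;
      *dst++ = *src++;
    } while (remaining > 0);
  }
}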
1582 result = lo_space_->AllocateRawCode(obj_size); 1571 result = lo_space_->AllocateRawCode(obj_size);
1583 } else { 1572 } else {
1584 result = code_space_->AllocateRaw(obj_size); 1573 result = code_space_->AllocateRaw(obj_size);
1585 } 1574 }
1586 1575
1587 if (result->IsFailure()) return result; 1576 if (result->IsFailure()) return result;
1588 1577
1589 // Copy code object. 1578 // Copy code object.
1590 Address old_addr = code->address(); 1579 Address old_addr = code->address();
1591 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 1580 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
OLD (lines 1592-1593):
1592   memcpy(new_addr, old_addr, obj_size);
1593
NEW (lines 1581-1583):
1581   CopyBlock(reinterpret_cast<Object**>(new_addr),
1582             reinterpret_cast<Object**>(old_addr),
1583             obj_size);
1594 // Relocate the copy. 1584 // Relocate the copy.
1595 Code* new_code = Code::cast(result); 1585 Code* new_code = Code::cast(result);
1596 new_code->Relocate(new_addr - old_addr); 1586 new_code->Relocate(new_addr - old_addr);
1597 return new_code; 1587 return new_code;
1598 } 1588 }
1599 1589
1600 1590
1601 Object* Heap::Allocate(Map* map, AllocationSpace space) { 1591 Object* Heap::Allocate(Map* map, AllocationSpace space) {
1602 ASSERT(gc_state_ == NOT_IN_GC); 1592 ASSERT(gc_state_ == NOT_IN_GC);
1603 ASSERT(map->instance_type() != MAP_TYPE); 1593 ASSERT(map->instance_type() != MAP_TYPE);
(...skipping 46 matching lines...)
1650 Object* Heap::AllocateArgumentsObject(Object* callee, int length) { 1640 Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
1651 // To get fast allocation and map sharing for arguments objects we 1641 // To get fast allocation and map sharing for arguments objects we
1652 // allocate them based on an arguments boilerplate. 1642 // allocate them based on an arguments boilerplate.
1653 1643
1654 // This calls Copy directly rather than using Heap::AllocateRaw so we 1644 // This calls Copy directly rather than using Heap::AllocateRaw so we
1655 // duplicate the check here. 1645 // duplicate the check here.
1656 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 1646 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
1657 1647
1658 JSObject* boilerplate = 1648 JSObject* boilerplate =
1659 Top::context()->global_context()->arguments_boilerplate(); 1649 Top::context()->global_context()->arguments_boilerplate();
1660 Object* result = boilerplate->Copy(); 1650 Object* result = CopyJSObject(boilerplate);
1661 if (result->IsFailure()) return result; 1651 if (result->IsFailure()) return result;
1662 1652
1663 Object* obj = JSObject::cast(result)->properties(); 1653 Object* obj = JSObject::cast(result)->properties();
1664 FixedArray::cast(obj)->set(arguments_callee_index, callee); 1654 FixedArray::cast(obj)->set(arguments_callee_index, callee);
1665 FixedArray::cast(obj)->set(arguments_length_index, Smi::FromInt(length)); 1655 FixedArray::cast(obj)->set(arguments_length_index, Smi::FromInt(length));
1666 1656
1667 // Allocate the fixed array. 1657 // Allocate the fixed array.
1668 obj = Heap::AllocateFixedArray(length); 1658 obj = Heap::AllocateFixedArray(length);
1669 if (obj->IsFailure()) return obj; 1659 if (obj->IsFailure()) return obj;
1670 JSObject::cast(result)->set_elements(FixedArray::cast(obj)); 1660 JSObject::cast(result)->set_elements(FixedArray::cast(obj));
(...skipping 86 matching lines...)
1757 Object* initial_map = AllocateInitialMap(constructor); 1747 Object* initial_map = AllocateInitialMap(constructor);
1758 if (initial_map->IsFailure()) return initial_map; 1748 if (initial_map->IsFailure()) return initial_map;
1759 constructor->set_initial_map(Map::cast(initial_map)); 1749 constructor->set_initial_map(Map::cast(initial_map));
1760 Map::cast(initial_map)->set_constructor(constructor); 1750 Map::cast(initial_map)->set_constructor(constructor);
1761 } 1751 }
1762 // Allocate the object based on the constructors initial map. 1752 // Allocate the object based on the constructors initial map.
1763 return AllocateJSObjectFromMap(constructor->initial_map(), pretenure); 1753 return AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
1764 } 1754 }
1765 1755
1766 1756
1757 Object* Heap::CopyJSObject(JSObject* source) {
1758 // Never used to copy functions. If functions need to be copied we
1759 // have to be careful to clear the literals array.
1760 ASSERT(!source->IsJSFunction());
1761
1762 // Make the clone.
1763 Map* map = source->map();
1764 int object_size = map->instance_size();
1765 Object* clone = new_space_.AllocateRaw(object_size);
Kasper Lund (2008/10/22 08:17:21): Extra space before new_space_.
1766 if (clone->IsFailure()) return clone;
1767 ASSERT(Heap::InNewSpace(clone));
1768
1769 // Copy the content.
1770 CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(clone)->address()),
1771 reinterpret_cast<Object**>(source->address()),
1772 object_size);
1773
1774 FixedArray* elements = FixedArray::cast(source->elements());
1775 FixedArray* properties = FixedArray::cast(source->properties());
1776 // Update elements if necessary.
1777 if (elements->length() > 0) {
1778 Object* elem = Heap::CopyFixedArray(elements);
Kasper Lund (2008/10/22 08:17:21): Extra space before Heap.
1779 if (elem->IsFailure()) return elem;
1780 JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
1781 }
1782 // Update properties if necessary.
1783 if (properties->length() > 0) {
1784 Object* prop = Heap::CopyFixedArray(properties);
1785 if (prop->IsFailure()) return prop;
1786 JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
1787 }
1788 // Return the new clone.
1789 return clone;
1790 }
1791
1792
1767 Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, 1793 Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
1768 JSGlobalProxy* object) { 1794 JSGlobalProxy* object) {
1769 // Allocate initial map if absent. 1795 // Allocate initial map if absent.
1770 if (!constructor->has_initial_map()) { 1796 if (!constructor->has_initial_map()) {
1771 Object* initial_map = AllocateInitialMap(constructor); 1797 Object* initial_map = AllocateInitialMap(constructor);
1772 if (initial_map->IsFailure()) return initial_map; 1798 if (initial_map->IsFailure()) return initial_map;
1773 constructor->set_initial_map(Map::cast(initial_map)); 1799 constructor->set_initial_map(Map::cast(initial_map));
1774 Map::cast(initial_map)->set_constructor(constructor); 1800 Map::cast(initial_map)->set_constructor(constructor);
1775 } 1801 }
1776 1802
(...skipping 280 matching lines...)
2057 // Allocate the raw data for a fixed array. 2083 // Allocate the raw data for a fixed array.
2058 int size = FixedArray::SizeFor(length); 2084 int size = FixedArray::SizeFor(length);
2059 return (size > MaxHeapObjectSize()) 2085 return (size > MaxHeapObjectSize())
2060 ? lo_space_->AllocateRawFixedArray(size) 2086 ? lo_space_->AllocateRawFixedArray(size)
2061 : new_space_.AllocateRaw(size); 2087 : new_space_.AllocateRaw(size);
2062 } 2088 }
2063 2089
2064 2090
2065 Object* Heap::CopyFixedArray(FixedArray* src) { 2091 Object* Heap::CopyFixedArray(FixedArray* src) {
2066 int len = src->length(); 2092 int len = src->length();
2067 Object* obj = Heap::AllocateRawFixedArray(len); 2093 Object* obj = AllocateRawFixedArray(len);
2068 if (obj->IsFailure()) return obj; 2094 if (obj->IsFailure()) return obj;
2095 if (Heap::InNewSpace(obj)) {
2096 HeapObject* dst = HeapObject::cast(obj);
2097 CopyBlock(reinterpret_cast<Object**>(dst->address()),
2098 reinterpret_cast<Object**>(src->address()),
2099 FixedArray::SizeFor(len));
2100 return obj;
2101 }
2069 HeapObject::cast(obj)->set_map(src->map()); 2102 HeapObject::cast(obj)->set_map(src->map());
2070 FixedArray* result = FixedArray::cast(obj); 2103 FixedArray* result = FixedArray::cast(obj);
2071 result->set_length(len); 2104 result->set_length(len);
OLD (lines 2072-2074):
2072   FixedArray::WriteBarrierMode mode = result->GetWriteBarrierMode();
2073   // Copy the content
2074   for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
NEW (lines 2105-2106):
2105   // Copy the content
2106   for (int i = 0; i < len; i++) result->set(i, src->get(i));
2075 return result; 2107 return result;
2076 } 2108 }
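Note on the CopyFixedArray fast path above: when the freshly allocated copy lands in new space, stores into it never create old-to-new pointers that the remembered set has to track, which is presumably why the per-element result->set() calls (and the WriteBarrierMode lookup the old code performed) can be replaced by a single CopyBlock over the whole array body. The caller-side protocol is unchanged; a minimal usage sketch following the failure-check pattern CopyJSObject uses (properties and clone are assumed caller-side variables, not part of the patch):

// Usage sketch (illustrative only): copying a properties array the way
// CopyJSObject does, checking for allocation failure before casting.
Object* prop = Heap::CopyFixedArray(properties);
if (prop->IsFailure()) return prop;             // propagate the Failure object
clone->set_properties(FixedArray::cast(prop));  // cast only after the check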
2077 2109
2078 2110
2079 Object* Heap::AllocateFixedArray(int length) { 2111 Object* Heap::AllocateFixedArray(int length) {
2080 Object* result = AllocateRawFixedArray(length); 2112 Object* result = AllocateRawFixedArray(length);
2081 if (!result->IsFailure()) { 2113 if (!result->IsFailure()) {
2082 // Initialize header. 2114 // Initialize header.
2083 reinterpret_cast<Array*>(result)->set_map(fixed_array_map()); 2115 reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
2084 FixedArray* array = FixedArray::cast(result); 2116 FixedArray* array = FixedArray::cast(result);
(...skipping 1043 matching lines...)
3128 #ifdef DEBUG 3160 #ifdef DEBUG
3129 bool Heap::GarbageCollectionGreedyCheck() { 3161 bool Heap::GarbageCollectionGreedyCheck() {
3130 ASSERT(FLAG_gc_greedy); 3162 ASSERT(FLAG_gc_greedy);
3131 if (Bootstrapper::IsActive()) return true; 3163 if (Bootstrapper::IsActive()) return true;
3132 if (disallow_allocation_failure()) return true; 3164 if (disallow_allocation_failure()) return true;
3133 return CollectGarbage(0, NEW_SPACE); 3165 return CollectGarbage(0, NEW_SPACE);
3134 } 3166 }
3135 #endif 3167 #endif
3136 3168
3137 } } // namespace v8::internal 3169 } } // namespace v8::internal