Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 136001: Changed allocation to allow large objects to be allocated in new space.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 11 years, 6 months ago
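The change replaces the single MaxHeapObjectSize() threshold with two limits: kMaxObjectSizeInNewSpace for objects allocated in new space and MaxObjectSizeInPagedSpace() for the paged old spaces, so oversized young objects go straight to the large object space instead of being forced into old space. Below is a minimal, illustrative sketch of that space-selection pattern as it appears in the raw string allocators and AllocateRawFixedArray in this patch; the constants and the ChooseSpace helper are stand-ins for this note only, not V8 APIs.

#include <cstddef>

// Illustrative limits only; the real values come from kMaxObjectSizeInNewSpace
// and Heap::MaxObjectSizeInPagedSpace() in the V8 sources.
const size_t kNewSpaceLimit = 512 * 1024;
const size_t kPagedSpaceLimit = 8 * 1024;

enum Space { NEW_SPACE, OLD_DATA_SPACE, LO_SPACE };

// The pattern the patch applies in the raw string allocators and
// AllocateRawFixedArray: a non-tenured object is allocated in new space when
// it fits there and otherwise goes directly to the large object space;
// tenured objects keep using the paged old spaces unless they exceed the
// (smaller) paged-space limit.
Space ChooseSpace(size_t size, bool tenured) {
  if (!tenured) {
    return size <= kNewSpaceLimit ? NEW_SPACE : LO_SPACE;
  }
  return size > kPagedSpaceLimit ? LO_SPACE : OLD_DATA_SPACE;
}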
@@ -1,10 +1,10 @@
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 703 matching lines...)
@@ -714,20 +714,21 @@
     new_space_front += object->Size();
   }

   // Promote and process all the to-be-promoted objects.
   while (!promotion_queue.is_empty()) {
     HeapObject* source;
     Map* map;
     promotion_queue.remove(&source, &map);
     // Copy the from-space object to its new location (given by the
     // forwarding address) and fix its map.
+
    Mads Ager (chromium) 2009/06/18 14:01:20  Accidental edit?
     HeapObject* target = source->map_word().ToForwardingAddress();
     CopyBlock(reinterpret_cast<Object**>(target->address()),
               reinterpret_cast<Object**>(source->address()),
               source->SizeFromMap(map));
     target->set_map(map);

 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
     // Update NewSpace stats if necessary.
     RecordCopiedObject(target);
 #endif
(...skipping 202 matching lines...)
@@ -936,56 +937,78 @@
   }

   int object_size = object->SizeFromMap(first_word.ToMap());
   // We rely on live objects in new space to be at least two pointers,
   // so we can store the from-space address and map pointer of promoted
   // objects in the to space.
   ASSERT(object_size >= 2 * kPointerSize);

   // If the object should be promoted, we try to copy it to old space.
   if (ShouldBePromoted(object->address(), object_size)) {
-    OldSpace* target_space = Heap::TargetSpace(object);
-    ASSERT(target_space == Heap::old_pointer_space_ ||
-           target_space == Heap::old_data_space_);
-    Object* result = target_space->AllocateRaw(object_size);
-    if (!result->IsFailure()) {
-      HeapObject* target = HeapObject::cast(result);
-      if (target_space == Heap::old_pointer_space_) {
+    Object* result;
+    if (object_size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(object_size);
+      if (!result->IsFailure()) {
         // Save the from-space object pointer and its map pointer at the
         // top of the to space to be swept and copied later. Write the
         // forwarding address over the map word of the from-space
         // object.
+        HeapObject* target = HeapObject::cast(result);
         promotion_queue.insert(object, first_word.ToMap());
         object->set_map_word(MapWord::FromForwardingAddress(target));

         // Give the space allocated for the result a proper map by
         // treating it as a free list node (not linked into the free
         // list).
         FreeListNode* node = FreeListNode::FromAddress(target->address());
         node->set_size(object_size);

         *p = target;
-      } else {
-        // Objects promoted to the data space can be copied immediately
-        // and not revisited---we will never sweep that space for
-        // pointers and the copied objects do not contain pointers to
-        // new space objects.
-        *p = MigrateObject(object, target, object_size);
+        return;
+      }
+    } else {
+      OldSpace* target_space = Heap::TargetSpace(object);
+      ASSERT(target_space == Heap::old_pointer_space_ ||
+             target_space == Heap::old_data_space_);
+      result = target_space->AllocateRaw(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+        if (target_space == Heap::old_pointer_space_) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later. Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
+        } else {
+          // Objects promoted to the data space can be copied immediately
+          // and not revisited---we will never sweep that space for
+          // pointers and the copied objects do not contain pointers to
+          // new space objects.
+          *p = MigrateObject(object, target, object_size);
 #ifdef DEBUG
         VerifyNonPointerSpacePointersVisitor v;
         (*p)->Iterate(&v);
 #endif
+        }
+        return;
       }
-      return;
     }
   }
-
   // The object should remain in new space or the old space allocation failed.
   Object* result = new_space_.AllocateRaw(object_size);
   // Failed allocation at this point is utterly unexpected.
   ASSERT(!result->IsFailure());
   *p = MigrateObject(object, HeapObject::cast(result), object_size);
 }


 void Heap::ScavengePointer(HeapObject** p) {
   ScavengeObject(p, *p);
(...skipping 699 matching lines...)
@@ -1691,36 +1714,36 @@
   return answer;
 }


 Object* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
   if (pretenure == NOT_TENURED) {
     return AllocateByteArray(length);
   }
   int size = ByteArray::SizeFor(length);
   AllocationSpace space =
-      size > MaxHeapObjectSize() ? LO_SPACE : OLD_DATA_SPACE;
+      size > MaxObjectSizeInPagedSpace() ? LO_SPACE : OLD_DATA_SPACE;

   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);

   if (result->IsFailure()) return result;

   reinterpret_cast<Array*>(result)->set_map(byte_array_map());
   reinterpret_cast<Array*>(result)->set_length(length);
   return result;
 }


 Object* Heap::AllocateByteArray(int length) {
   int size = ByteArray::SizeFor(length);
   AllocationSpace space =
-      size > MaxHeapObjectSize() ? LO_SPACE : NEW_SPACE;
+      size > MaxObjectSizeInPagedSpace() ? LO_SPACE : NEW_SPACE;

   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);

   if (result->IsFailure()) return result;

   reinterpret_cast<Array*>(result)->set_map(byte_array_map());
   reinterpret_cast<Array*>(result)->set_length(length);
   return result;
 }

(...skipping 14 matching lines...)
@@ -1741,21 +1764,21 @@
                           ZoneScopeInfo* sinfo,
                           Code::Flags flags,
                           Handle<Object> self_reference) {
   // Compute size
   int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
   int sinfo_size = 0;
   if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
   int obj_size = Code::SizeFor(body_size, sinfo_size);
   ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
   Object* result;
-  if (obj_size > MaxHeapObjectSize()) {
+  if (obj_size > MaxObjectSizeInPagedSpace()) {
     result = lo_space_->AllocateRawCode(obj_size);
   } else {
     result = code_space_->AllocateRaw(obj_size);
   }

   if (result->IsFailure()) return result;

   // Initialize the object
   HeapObject::cast(result)->set_map(code_map());
   Code* code = Code::cast(result);
(...skipping 19 matching lines...)
@@ -1781,21 +1804,21 @@
   code->Verify();
 #endif
   return code;
 }


 Object* Heap::CopyCode(Code* code) {
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
   Object* result;
-  if (obj_size > MaxHeapObjectSize()) {
+  if (obj_size > MaxObjectSizeInPagedSpace()) {
     result = lo_space_->AllocateRawCode(obj_size);
   } else {
     result = code_space_->AllocateRaw(obj_size);
   }

   if (result->IsFailure()) return result;

   // Copy code object.
   Address old_addr = code->address();
   Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
(...skipping 154 matching lines...)
@@ -1956,21 +1979,21 @@
   ASSERT(map->instance_type() != JS_FUNCTION_TYPE);

   // Allocate the backing storage for the properties.
   int prop_size = map->unused_property_fields() - map->inobject_properties();
   Object* properties = AllocateFixedArray(prop_size);
   if (properties->IsFailure()) return properties;

   // Allocate the JSObject.
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
-  if (map->instance_size() > MaxHeapObjectSize()) space = LO_SPACE;
+  if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
   Object* obj = Allocate(map, space);
   if (obj->IsFailure()) return obj;

   // Initialize the JSObject.
   InitializeJSObjectFromMap(JSObject::cast(obj),
                             FixedArray::cast(properties),
                             map);
   return obj;
 }

(...skipping 266 matching lines...)
@@ -2243,79 +2266,85 @@
     } else if (chars <= String::kMaxMediumStringSize) {
       map = medium_symbol_map();
     } else {
       map = long_symbol_map();
     }
     size = SeqTwoByteString::SizeFor(chars);
   }

   // Allocate string.
   AllocationSpace space =
-      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_DATA_SPACE;
+      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_DATA_SPACE;
   Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
   if (result->IsFailure()) return result;

   reinterpret_cast<HeapObject*>(result)->set_map(map);
   // The hash value contains the length of the string.
   String* answer = String::cast(result);
   answer->set_length_field(length_field);

   ASSERT_EQ(size, answer->Size());

   // Fill in the characters.
   for (int i = 0; i < chars; i++) {
     answer->Set(i, buffer->GetNext());
   }
   return answer;
 }


 Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = SeqAsciiString::SizeFor(length);
-  if (size > MaxHeapObjectSize()) {
-    space = LO_SPACE;
+
+  Object* result = Failure::OutOfMemoryException();
+  if (space == NEW_SPACE) {
+    result = size <= kMaxObjectSizeInNewSpace
+        ? new_space_.AllocateRaw(size)
+        : lo_space_->AllocateRawFixedArray(size);
+  } else {
+    if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
+    result = AllocateRaw(size, space, OLD_DATA_SPACE);
   }
-
-  // Use AllocateRaw rather than Allocate because the object's size cannot be
-  // determined from the map.
-  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
   if (result->IsFailure()) return result;

   // Determine the map based on the string's length.
   Map* map;
   if (length <= String::kMaxShortStringSize) {
     map = short_ascii_string_map();
   } else if (length <= String::kMaxMediumStringSize) {
     map = medium_ascii_string_map();
   } else {
     map = long_ascii_string_map();
   }

   // Partially initialize the object.
   HeapObject::cast(result)->set_map(map);
   String::cast(result)->set_length(length);
   ASSERT_EQ(size, HeapObject::cast(result)->Size());
   return result;
 }


 Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) {
   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = SeqTwoByteString::SizeFor(length);
-  if (size > MaxHeapObjectSize()) {
-    space = LO_SPACE;
+
+  Object* result = Failure::OutOfMemoryException();
+  if (space == NEW_SPACE) {
+    result = size <= kMaxObjectSizeInNewSpace
+        ? new_space_.AllocateRaw(size)
+        : lo_space_->AllocateRawFixedArray(size);
+  } else {
+    if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
+    result = AllocateRaw(size, space, OLD_DATA_SPACE);
   }
-
-  // Use AllocateRaw rather than Allocate because the object's size cannot be
-  // determined from the map.
-  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
   if (result->IsFailure()) return result;

   // Determine the map based on the string's length.
   Map* map;
   if (length <= String::kMaxShortStringSize) {
     map = short_string_map();
   } else if (length <= String::kMaxMediumStringSize) {
     map = medium_string_map();
   } else {
     map = long_string_map();
(...skipping 16 matching lines...)
@@ -2338,23 +2367,23 @@
   reinterpret_cast<Array*>(result)->set_length(0);
   return result;
 }


 Object* Heap::AllocateRawFixedArray(int length) {
   // Use the general function if we're forced to always allocate.
   if (always_allocate()) return AllocateFixedArray(length, NOT_TENURED);
   // Allocate the raw data for a fixed array.
   int size = FixedArray::SizeFor(length);
-  return (size > MaxHeapObjectSize())
-      ? lo_space_->AllocateRawFixedArray(size)
-      : new_space_.AllocateRaw(size);
+  return size <= kMaxObjectSizeInNewSpace
+      ? new_space_.AllocateRaw(size)
+      : lo_space_->AllocateRawFixedArray(size);
 }


 Object* Heap::CopyFixedArray(FixedArray* src) {
   int len = src->length();
   Object* obj = AllocateRawFixedArray(len);
   if (obj->IsFailure()) return obj;
   if (Heap::InNewSpace(obj)) {
     HeapObject* dst = HeapObject::cast(obj);
     CopyBlock(reinterpret_cast<Object**>(dst->address()),
(...skipping 27 matching lines...)
@@ -2388,30 +2417,36 @@
   }
   return result;
 }


 Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
   ASSERT(empty_fixed_array()->IsFixedArray());
   if (length == 0) return empty_fixed_array();

   int size = FixedArray::SizeFor(length);
-  Object* result;
-  if (size > MaxHeapObjectSize()) {
-    result = lo_space_->AllocateRawFixedArray(size);
-  } else {
-    AllocationSpace space =
-        (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
-    result = AllocateRaw(size, space, OLD_POINTER_SPACE);
+  Object* result = Failure::OutOfMemoryException();
+  if (pretenure != TENURED) {
+    result = size <= kMaxObjectSizeInNewSpace
+        ? new_space_.AllocateRaw(size)
+        : lo_space_->AllocateRawFixedArray(size);
   }
-  if (result->IsFailure()) return result;
-
+  if (result->IsFailure()) {
+    if (size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(size);
+    } else {
+      AllocationSpace space =
+          (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
+      result = AllocateRaw(size, space, OLD_POINTER_SPACE);
+    }
+    if (result->IsFailure()) return result;
+  }
   // Initialize the object.
   reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
   FixedArray* array = FixedArray::cast(result);
   array->set_length(length);
   Object* value = undefined_value();
   for (int index = 0; index < length; index++) {
     array->set(index, value, SKIP_WRITE_BARRIER);
   }
   return array;
 }
(...skipping 79 matching lines...)
@@ -2497,21 +2532,21 @@
   switch (type) {
 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
     STRUCT_LIST(MAKE_CASE)
 #undef MAKE_CASE
     default:
       UNREACHABLE();
       return Failure::InternalError();
   }
   int size = map->instance_size();
   AllocationSpace space =
-      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_POINTER_SPACE;
+      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
   Object* result = Heap::Allocate(map, space);
   if (result->IsFailure()) return result;
   Struct::cast(result)->InitializeBody(size);
   return result;
 }


 #ifdef DEBUG

 void Heap::Print() {
(...skipping 1002 matching lines...)
@@ -3520,10 +3555,10 @@
 #ifdef DEBUG
 bool Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
   if (Bootstrapper::IsActive()) return true;
   if (disallow_allocation_failure()) return true;
   return CollectGarbage(0, NEW_SPACE);
 }
 #endif

 } }  // namespace v8::internal
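The largest hunk above rewrites the promotion path in Heap::ScavengeObject so that an object bigger than the paged-space limit is promoted straight into the large object space and queued for later copying, while normal-sized objects still go to the old space chosen by Heap::TargetSpace(); if either promotion allocation fails, the object is copied within new space as before. The sketch below only illustrates that decision order; ChoosePromotionTarget, kMaxPagedObjectSize and the allocation_ok flag are hypothetical stand-ins, not V8 code.

#include <cstdio>

// Hypothetical limit, standing in for Heap::MaxObjectSizeInPagedSpace().
const int kMaxPagedObjectSize = 8 * 1024;

enum PromotionTarget { LARGE_OBJECT_SPACE, OLD_SPACE, STAYS_IN_NEW_SPACE };

// Mirrors the decision order the patch gives Heap::ScavengeObject: an object
// that should be promoted but is too big for a paged page goes straight to
// the large object space; a normal-sized object still goes to an old space;
// if the chosen promotion fails (simulated here by allocation_ok), the object
// falls through and stays in new space.
PromotionTarget ChoosePromotionTarget(int object_size, bool allocation_ok) {
  if (object_size > kMaxPagedObjectSize) {
    if (allocation_ok) return LARGE_OBJECT_SPACE;
  } else {
    if (allocation_ok) return OLD_SPACE;
  }
  return STAYS_IN_NEW_SPACE;
}

int main() {
  std::printf("%d\n", ChoosePromotionTarget(16 * 1024, true));   // LARGE_OBJECT_SPACE
  std::printf("%d\n", ChoosePromotionTarget(256, true));         // OLD_SPACE
  std::printf("%d\n", ChoosePromotionTarget(256, false));        // STAYS_IN_NEW_SPACE
  return 0;
}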